7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
22 #include "exec/exec-all.h"
23 #include "disas/disas.h"
25 #include "qemu-common.h"
28 #include "qemu/bitops.h"
29 #include "exec/cpu_ldst.h"
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
34 #include "trace-tcg.h"
/* Disassembly logging for the translator.
 * NOTE(review): this excerpt is truncated — the `#ifdef OPENRISC_DISAS` /
 * `#else` / `#endif` lines that select between the two LOG_DIS definitions
 * are missing, and the leading numbers on each line are extraction
 * artifacts from a numbered listing, not code.
 */
37 #define OPENRISC_DISAS
/* When disas is enabled: route to the TB_IN_ASM log mask. */
40 # define LOG_DIS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
/* When disabled: swallow arguments with the classic do/while(0) no-op. */
42 # define LOG_DIS(...) do { } while (0)
/* Per-translation-block decoder state, threaded through all dec_* helpers.
 * NOTE(review): truncated excerpt — other fields referenced elsewhere in
 * this file (at least `mem_idx`, `is_jmp`, and presumably a `tb` pointer
 * used by gen_goto_tb — TODO confirm against the full source) and the
 * closing `} DisasContext;` are missing here.
 */
45 typedef struct DisasContext {
/* current, previous, and next instruction addresses during translation */
47 target_ulong pc, ppc, npc;
/* tb_flags/synced_flags are kept in sync by gen_sync_flags() */
48 uint32_t tb_flags, synced_flags, flags;
/* non-zero when gdbstub single-stepping; checked in gen_goto_tb() */
51 int singlestep_enabled;
/* countdown for the delay slot: set to 2 by gen_jump() */
52 uint32_t delayed_branch;
/* TCG global variables mapping fixed CPUOpenRISCState fields; created once
 * in openrisc_translate_init().
 * NOTE(review): truncated excerpt — the declarations of cpu_sr, cpu_pc,
 * cpu_npc and cpu_ppc (initialized below in openrisc_translate_init) are
 * missing from this listing.
 */
55 static TCGv_ptr cpu_env;
/* the 32 general-purpose guest registers r0..r31 */
57 static TCGv cpu_R[32];
59 static TCGv jmp_pc; /* l.jr/l.jalr temp pc */
62 static TCGv_i32 env_btaken; /* bf/bnf , F flag taken */
63 static TCGv_i32 fpcsr;
/* MAC unit accumulator halves, used by l.mac/l.msb/l.maci/l.macrc */
64 static TCGv machi, maclo;
65 static TCGv fpmaddhi, fpmaddlo;
66 static TCGv_i32 env_flags;
67 #include "exec/gen-icount.h"
/* One-time translator initialization: registers every TCG global used by
 * this frontend (SR, PC triple, jump/MAC/FPU state and the 32 GPRs) against
 * its offset inside CPUOpenRISCState.
 * NOTE(review): truncated excerpt — the opening brace, the `int i;`
 * declaration, the string-name arguments of several tcg_global_mem_new
 * calls, and the closing braces of the loop/function are missing; leading
 * numbers are listing artifacts.
 */
69 void openrisc_translate_init(void)
71 static const char * const regnames[] = {
72 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
73 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
74 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
75 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
79 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
80 cpu_sr = tcg_global_mem_new(TCG_AREG0,
81 offsetof(CPUOpenRISCState, sr), "sr");
82 env_flags = tcg_global_mem_new_i32(TCG_AREG0,
83 offsetof(CPUOpenRISCState, flags),
85 cpu_pc = tcg_global_mem_new(TCG_AREG0,
86 offsetof(CPUOpenRISCState, pc), "pc");
87 cpu_npc = tcg_global_mem_new(TCG_AREG0,
88 offsetof(CPUOpenRISCState, npc), "npc");
89 cpu_ppc = tcg_global_mem_new(TCG_AREG0,
90 offsetof(CPUOpenRISCState, ppc), "ppc");
91 jmp_pc = tcg_global_mem_new(TCG_AREG0,
92 offsetof(CPUOpenRISCState, jmp_pc), "jmp_pc");
93 env_btaken = tcg_global_mem_new_i32(TCG_AREG0,
94 offsetof(CPUOpenRISCState, btaken),
96 fpcsr = tcg_global_mem_new_i32(TCG_AREG0,
97 offsetof(CPUOpenRISCState, fpcsr),
99 machi = tcg_global_mem_new(TCG_AREG0,
100 offsetof(CPUOpenRISCState, machi),
102 maclo = tcg_global_mem_new(TCG_AREG0,
103 offsetof(CPUOpenRISCState, maclo),
105 fpmaddhi = tcg_global_mem_new(TCG_AREG0,
106 offsetof(CPUOpenRISCState, fpmaddhi),
108 fpmaddlo = tcg_global_mem_new(TCG_AREG0,
109 offsetof(CPUOpenRISCState, fpmaddlo),
/* one TCG global per guest GPR, named from regnames[] above */
111 for (i = 0; i < 32; i++) {
112 cpu_R[i] = tcg_global_mem_new(TCG_AREG0,
113 offsetof(CPUOpenRISCState, gpr[i]),
118 /* Writeback SR_F translation space to execution space. */
119 static inline void wb_SR_F(void)
121 TCGLabel *label = gen_new_label();
122 tcg_gen_andi_tl(cpu_sr, cpu_sr, ~SR_F);
123 tcg_gen_brcondi_tl(TCG_COND_EQ, env_btaken, 0, label);
124 tcg_gen_ori_tl(cpu_sr, cpu_sr, SR_F);
125 gen_set_label(label);
/* Return the low WIDTH bits of VAL, i.e. VAL zero-extended from a
 * WIDTH-bit field.  The mask is computed on an unsigned constant so that
 * width == 31 cannot trigger signed-overflow UB in `1 << width`
 * (callers in this file use widths up to 26).
 * (Restored: function braces; stripped listing-number artifacts.)
 */
static inline int zero_extend(unsigned int val, int width)
{
    return val & ((1u << width) - 1);
}
133 static inline int sign_extend(unsigned int val, int width)
138 val <<= TARGET_LONG_BITS - width;
141 sval >>= TARGET_LONG_BITS - width;
145 static inline void gen_sync_flags(DisasContext *dc)
147 /* Sync the tb dependent flag between translate and runtime. */
148 if (dc->tb_flags != dc->synced_flags) {
149 tcg_gen_movi_tl(env_flags, dc->tb_flags);
150 dc->synced_flags = dc->tb_flags;
154 static void gen_exception(DisasContext *dc, unsigned int excp)
156 TCGv_i32 tmp = tcg_const_i32(excp);
157 gen_helper_exception(cpu_env, tmp);
158 tcg_temp_free_i32(tmp);
161 static void gen_illegal_exception(DisasContext *dc)
163 tcg_gen_movi_tl(cpu_pc, dc->pc);
164 gen_exception(dc, EXCP_ILLEGAL);
165 dc->is_jmp = DISAS_UPDATE;
168 /* not used yet, open it when we need or64. */
169 /*#ifdef TARGET_OPENRISC64
170 static void check_ob64s(DisasContext *dc)
172 if (!(dc->flags & CPUCFGR_OB64S)) {
173 gen_illegal_exception(dc);
177 static void check_of64s(DisasContext *dc)
179 if (!(dc->flags & CPUCFGR_OF64S)) {
180 gen_illegal_exception(dc);
184 static void check_ov64s(DisasContext *dc)
186 if (!(dc->flags & CPUCFGR_OV64S)) {
187 gen_illegal_exception(dc);
192 static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
194 TranslationBlock *tb;
196 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
197 likely(!dc->singlestep_enabled)) {
198 tcg_gen_movi_tl(cpu_pc, dest);
200 tcg_gen_exit_tb((uintptr_t)tb + n);
202 tcg_gen_movi_tl(cpu_pc, dest);
203 if (dc->singlestep_enabled) {
204 gen_exception(dc, EXCP_DEBUG);
/* Emit the jump/branch setup for l.j/l.jal/l.bnf/l.bf/l.jr/l.jalr: load
 * jmp_pc with the target (conditionally for bf/bnf), write the link
 * register r9 for the *al forms, and arm the 2-instruction delay-slot
 * countdown.
 * NOTE(review): truncated excerpt — the `target_ulong tmp_pc;` declaration,
 * the `switch (op0)` header, `break;`s, case 0x00's label, the brcondi
 * continuation/label lines, and closing braces are missing; the leading
 * numbers are listing artifacts, not code.
 */
210 static void gen_jump(DisasContext *dc, uint32_t imm, uint32_t reg, uint32_t op0)
213 /* N26, 26bits imm */
/* NOTE(review): imm is already shifted <<2 before sign_extend(.., 26);
 * presumably the intended field is the 28-bit byte offset — confirm
 * against the architecture manual / full source. */
214 tmp_pc = sign_extend((imm<<2), 26) + dc->pc;
218 tcg_gen_movi_tl(jmp_pc, tmp_pc);
220 case 0x01: /* l.jal */
/* link register r9 <- return address past the delay slot */
221 tcg_gen_movi_tl(cpu_R[9], (dc->pc + 8));
222 tcg_gen_movi_tl(jmp_pc, tmp_pc);
224 case 0x03: /* l.bnf */
225 case 0x04: /* l.bf */
227 TCGLabel *lab = gen_new_label();
228 TCGv sr_f = tcg_temp_new();
/* default fall-through target; overwritten below when branch taken */
229 tcg_gen_movi_tl(jmp_pc, dc->pc+8);
230 tcg_gen_andi_tl(sr_f, cpu_sr, SR_F);
/* bnf branches on SR_F clear, bf on SR_F set */
231 tcg_gen_brcondi_i32(op0 == 0x03 ? TCG_COND_EQ : TCG_COND_NE,
233 tcg_gen_movi_tl(jmp_pc, tmp_pc);
238 case 0x11: /* l.jr */
239 tcg_gen_mov_tl(jmp_pc, cpu_R[reg]);
241 case 0x12: /* l.jalr */
242 tcg_gen_movi_tl(cpu_R[9], (dc->pc + 8));
243 tcg_gen_mov_tl(jmp_pc, cpu_R[reg]);
246 gen_illegal_exception(dc);
/* arm the delay slot: branch resolves after the next instruction */
250 dc->delayed_branch = 2;
251 dc->tb_flags |= D_FLAG;
/* Decode the 0x38-family ALU instructions (l.add .. l.extwz): arithmetic
 * with OV/CY tracking and EXCP_RANGE on overflow when SR_OVE is set,
 * logicals, mul/div, shifts/rotate, cmov, ff1/fl1 and the extend ops.
 * NOTE(review): heavily truncated excerpt — the opening brace, the
 * `uint32_t ra, rb, rd;` declaration, the nested switch(op1)/switch(op2)
 * headers, `break;`s, `gen_set_label(lab)` lines, block braces and the
 * default labels around each `gen_illegal_exception` are missing; leading
 * numbers are listing artifacts, not code.
 */
256 static void dec_calc(DisasContext *dc, uint32_t insn)
258 uint32_t op0, op1, op2;
260 op0 = extract32(insn, 0, 4);
261 op1 = extract32(insn, 8, 2);
262 op2 = extract32(insn, 6, 2);
263 ra = extract32(insn, 16, 5);
264 rb = extract32(insn, 11, 5);
265 rd = extract32(insn, 21, 5);
270 case 0x00: /* l.add */
271 LOG_DIS("l.add r%d, r%d, r%d\n", rd, ra, rb);
273 TCGLabel *lab = gen_new_label();
274 TCGv_i64 ta = tcg_temp_new_i64();
275 TCGv_i64 tb = tcg_temp_new_i64();
276 TCGv_i64 td = tcg_temp_local_new_i64();
277 TCGv_i32 res = tcg_temp_local_new_i32();
278 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
279 tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
280 tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
281 tcg_gen_add_i64(td, ta, tb);
282 tcg_gen_extrl_i64_i32(res, td);
/* NOTE(review): l.add and l.sub shift by 31 here while l.addc/l.addi
 * shift by 32 for the same bit-31/bit-32 overflow test — looks
 * inconsistent; verify which is intended against the full source. */
283 tcg_gen_shri_i64(td, td, 31);
284 tcg_gen_andi_i64(td, td, 0x3);
285 /* Jump to lab when no overflow. */
286 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
287 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
288 tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
289 tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
290 tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
291 gen_exception(dc, EXCP_RANGE);
293 tcg_gen_mov_i32(cpu_R[rd], res);
294 tcg_temp_free_i64(ta);
295 tcg_temp_free_i64(tb);
296 tcg_temp_free_i64(td);
297 tcg_temp_free_i32(res);
298 tcg_temp_free_i32(sr_ove);
302 gen_illegal_exception(dc);
307 case 0x0001: /* l.addc */
310 LOG_DIS("l.addc r%d, r%d, r%d\n", rd, ra, rb);
312 TCGLabel *lab = gen_new_label();
313 TCGv_i64 ta = tcg_temp_new_i64();
314 TCGv_i64 tb = tcg_temp_new_i64();
315 TCGv_i64 tcy = tcg_temp_local_new_i64();
316 TCGv_i64 td = tcg_temp_local_new_i64();
317 TCGv_i32 res = tcg_temp_local_new_i32();
318 TCGv_i32 sr_cy = tcg_temp_local_new_i32();
319 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
320 tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
321 tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
/* extract SR_CY (bit 10) down to 0/1 and add it as the carry-in */
322 tcg_gen_andi_i32(sr_cy, cpu_sr, SR_CY);
323 tcg_gen_extu_i32_i64(tcy, sr_cy);
324 tcg_gen_shri_i64(tcy, tcy, 10);
325 tcg_gen_add_i64(td, ta, tb);
326 tcg_gen_add_i64(td, td, tcy);
327 tcg_gen_extrl_i64_i32(res, td);
328 tcg_gen_shri_i64(td, td, 32);
329 tcg_gen_andi_i64(td, td, 0x3);
330 /* Jump to lab when no overflow. */
331 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
332 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
333 tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
334 tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
335 tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
336 gen_exception(dc, EXCP_RANGE);
338 tcg_gen_mov_i32(cpu_R[rd], res);
339 tcg_temp_free_i64(ta);
340 tcg_temp_free_i64(tb);
341 tcg_temp_free_i64(tcy);
342 tcg_temp_free_i64(td);
343 tcg_temp_free_i32(res);
344 tcg_temp_free_i32(sr_cy);
345 tcg_temp_free_i32(sr_ove);
349 gen_illegal_exception(dc);
354 case 0x0002: /* l.sub */
357 LOG_DIS("l.sub r%d, r%d, r%d\n", rd, ra, rb);
359 TCGLabel *lab = gen_new_label();
360 TCGv_i64 ta = tcg_temp_new_i64();
361 TCGv_i64 tb = tcg_temp_new_i64();
362 TCGv_i64 td = tcg_temp_local_new_i64();
363 TCGv_i32 res = tcg_temp_local_new_i32();
364 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
366 tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
367 tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
368 tcg_gen_sub_i64(td, ta, tb);
369 tcg_gen_extrl_i64_i32(res, td);
370 tcg_gen_shri_i64(td, td, 31);
371 tcg_gen_andi_i64(td, td, 0x3);
372 /* Jump to lab when no overflow. */
373 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
374 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
375 tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
376 tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
377 tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
378 gen_exception(dc, EXCP_RANGE);
380 tcg_gen_mov_i32(cpu_R[rd], res);
381 tcg_temp_free_i64(ta);
382 tcg_temp_free_i64(tb);
383 tcg_temp_free_i64(td);
384 tcg_temp_free_i32(res);
385 tcg_temp_free_i32(sr_ove);
389 gen_illegal_exception(dc);
394 case 0x0003: /* l.and */
397 LOG_DIS("l.and r%d, r%d, r%d\n", rd, ra, rb);
398 tcg_gen_and_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
401 gen_illegal_exception(dc);
406 case 0x0004: /* l.or */
409 LOG_DIS("l.or r%d, r%d, r%d\n", rd, ra, rb);
410 tcg_gen_or_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
413 gen_illegal_exception(dc);
420 case 0x00: /* l.xor */
421 LOG_DIS("l.xor r%d, r%d, r%d\n", rd, ra, rb);
422 tcg_gen_xor_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
425 gen_illegal_exception(dc);
432 case 0x03: /* l.mul */
433 LOG_DIS("l.mul r%d, r%d, r%d\n", rd, ra, rb);
/* r0 is architecturally zero, so a zero operand short-circuits to 0 */
434 if (ra != 0 && rb != 0) {
435 gen_helper_mul32(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
437 tcg_gen_movi_tl(cpu_R[rd], 0x0);
441 gen_illegal_exception(dc);
448 case 0x03: /* l.div */
449 LOG_DIS("l.div r%d, r%d, r%d\n", rd, ra, rb);
451 TCGLabel *lab0 = gen_new_label();
452 TCGLabel *lab1 = gen_new_label();
453 TCGLabel *lab2 = gen_new_label();
454 TCGLabel *lab3 = gen_new_label();
455 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
/* rb == 0 path: set OV|CY, raise EXCP_RANGE only if SR_OVE enabled */
457 tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
458 tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
459 tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab0);
460 gen_exception(dc, EXCP_RANGE);
/* NOTE(review): the truncated brcondi operands below presumably test
 * rb==0, ra==INT_MIN and rb==-1 for the overflow case — confirm. */
463 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_R[rb],
465 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[ra],
467 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[rb],
470 tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
471 tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
472 tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab3);
473 gen_exception(dc, EXCP_RANGE);
475 tcg_gen_div_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
478 tcg_temp_free_i32(sr_ove);
483 gen_illegal_exception(dc);
490 case 0x03: /* l.divu */
491 LOG_DIS("l.divu r%d, r%d, r%d\n", rd, ra, rb);
493 TCGLabel *lab0 = gen_new_label();
494 TCGLabel *lab1 = gen_new_label();
495 TCGLabel *lab2 = gen_new_label();
496 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
498 tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
499 tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
500 tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab0);
501 gen_exception(dc, EXCP_RANGE);
504 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[rb],
506 tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
507 tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
508 tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab2);
509 gen_exception(dc, EXCP_RANGE);
511 tcg_gen_divu_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
514 tcg_temp_free_i32(sr_ove);
519 gen_illegal_exception(dc);
526 case 0x03: /* l.mulu */
527 LOG_DIS("l.mulu r%d, r%d, r%d\n", rd, ra, rb);
528 if (rb != 0 && ra != 0) {
529 TCGv_i64 result = tcg_temp_local_new_i64();
530 TCGv_i64 tra = tcg_temp_local_new_i64();
531 TCGv_i64 trb = tcg_temp_local_new_i64();
532 TCGv_i64 high = tcg_temp_new_i64();
533 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
534 TCGLabel *lab = gen_new_label();
535 /* Calculate each result. */
536 tcg_gen_extu_i32_i64(tra, cpu_R[ra]);
537 tcg_gen_extu_i32_i64(trb, cpu_R[rb]);
538 tcg_gen_mul_i64(result, tra, trb);
539 tcg_temp_free_i64(tra);
540 tcg_temp_free_i64(trb);
541 tcg_gen_shri_i64(high, result, TARGET_LONG_BITS);
542 /* Overflow or not. */
543 tcg_gen_brcondi_i64(TCG_COND_EQ, high, 0x00000000, lab);
544 tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
545 tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
546 tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab);
547 gen_exception(dc, EXCP_RANGE);
549 tcg_temp_free_i64(high);
550 tcg_gen_trunc_i64_tl(cpu_R[rd], result);
551 tcg_temp_free_i64(result);
552 tcg_temp_free_i32(sr_ove);
554 tcg_gen_movi_tl(cpu_R[rd], 0);
559 gen_illegal_exception(dc);
566 case 0x00: /* l.cmov */
567 LOG_DIS("l.cmov r%d, r%d, r%d\n", rd, ra, rb);
569 TCGLabel *lab = gen_new_label();
570 TCGv res = tcg_temp_local_new();
571 TCGv sr_f = tcg_temp_new();
/* rd <- (SR_F set) ? ra : rb */
572 tcg_gen_andi_tl(sr_f, cpu_sr, SR_F);
573 tcg_gen_mov_tl(res, cpu_R[rb]);
574 tcg_gen_brcondi_tl(TCG_COND_NE, sr_f, SR_F, lab);
575 tcg_gen_mov_tl(res, cpu_R[ra]);
577 tcg_gen_mov_tl(cpu_R[rd], res);
584 gen_illegal_exception(dc);
591 case 0x00: /* l.ff1 */
592 LOG_DIS("l.ff1 r%d, r%d, r%d\n", rd, ra, rb);
593 gen_helper_ff1(cpu_R[rd], cpu_R[ra]);
595 case 0x01: /* l.fl1 */
596 LOG_DIS("l.fl1 r%d, r%d, r%d\n", rd, ra, rb);
597 gen_helper_fl1(cpu_R[rd], cpu_R[ra]);
601 gen_illegal_exception(dc);
610 case 0x00: /* l.sll */
611 LOG_DIS("l.sll r%d, r%d, r%d\n", rd, ra, rb);
612 tcg_gen_shl_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
614 case 0x01: /* l.srl */
615 LOG_DIS("l.srl r%d, r%d, r%d\n", rd, ra, rb);
616 tcg_gen_shr_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
618 case 0x02: /* l.sra */
619 LOG_DIS("l.sra r%d, r%d, r%d\n", rd, ra, rb);
620 tcg_gen_sar_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
622 case 0x03: /* l.ror */
623 LOG_DIS("l.ror r%d, r%d, r%d\n", rd, ra, rb);
624 tcg_gen_rotr_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
628 gen_illegal_exception(dc);
634 gen_illegal_exception(dc);
643 case 0x00: /* l.exths */
644 LOG_DIS("l.exths r%d, r%d\n", rd, ra);
645 tcg_gen_ext16s_tl(cpu_R[rd], cpu_R[ra]);
647 case 0x01: /* l.extbs */
648 LOG_DIS("l.extbs r%d, r%d\n", rd, ra);
649 tcg_gen_ext8s_tl(cpu_R[rd], cpu_R[ra]);
651 case 0x02: /* l.exthz */
652 LOG_DIS("l.exthz r%d, r%d\n", rd, ra);
653 tcg_gen_ext16u_tl(cpu_R[rd], cpu_R[ra]);
655 case 0x03: /* l.extbz */
656 LOG_DIS("l.extbz r%d, r%d\n", rd, ra);
657 tcg_gen_ext8u_tl(cpu_R[rd], cpu_R[ra]);
661 gen_illegal_exception(dc);
667 gen_illegal_exception(dc);
676 case 0x00: /* l.extws */
677 LOG_DIS("l.extws r%d, r%d\n", rd, ra);
678 tcg_gen_ext32s_tl(cpu_R[rd], cpu_R[ra]);
680 case 0x01: /* l.extwz */
681 LOG_DIS("l.extwz r%d, r%d\n", rd, ra);
682 tcg_gen_ext32u_tl(cpu_R[rd], cpu_R[ra]);
686 gen_illegal_exception(dc);
692 gen_illegal_exception(dc);
698 gen_illegal_exception(dc);
/* Top-level decoder for the "misc" opcode space: jumps/branches, l.nop,
 * l.maci, l.rfe, the l.cust* hooks, loads (l.lwz..l.lhs), immediate
 * arithmetic/logical ops (l.addi..l.muli), SPR access and stores
 * (l.sw/l.sb/l.sh).
 * NOTE(review): heavily truncated excerpt — the opening brace, local
 * declarations (op0/op1/ra/rb/rd, L6/K5, `TCGMemOp mop`), switch headers,
 * `break;`s, the `#else`/`#endif` halves of the CONFIG_USER_ONLY and
 * OPENRISC_DISAS conditionals, the per-load `mop` assignments, and closing
 * braces are missing; leading numbers are listing artifacts, not code.
 */
703 static void dec_misc(DisasContext *dc, uint32_t insn)
707 #ifdef OPENRISC_DISAS
710 uint32_t I16, I5, I11, N26, tmp;
713 op0 = extract32(insn, 26, 6);
714 op1 = extract32(insn, 24, 2);
715 ra = extract32(insn, 16, 5);
716 rb = extract32(insn, 11, 5);
717 rd = extract32(insn, 21, 5);
718 #ifdef OPENRISC_DISAS
719 L6 = extract32(insn, 5, 6);
720 K5 = extract32(insn, 0, 5);
722 I16 = extract32(insn, 0, 16);
723 I5 = extract32(insn, 21, 5);
724 I11 = extract32(insn, 0, 11);
725 N26 = extract32(insn, 0, 26);
/* split 16-bit immediate for mtspr/store forms: high 5 | low 11 bits */
726 tmp = (I5<<11) + I11;
730 LOG_DIS("l.j %d\n", N26);
731 gen_jump(dc, N26, 0, op0);
734 case 0x01: /* l.jal */
735 LOG_DIS("l.jal %d\n", N26);
736 gen_jump(dc, N26, 0, op0);
739 case 0x03: /* l.bnf */
740 LOG_DIS("l.bnf %d\n", N26);
741 gen_jump(dc, N26, 0, op0);
744 case 0x04: /* l.bf */
745 LOG_DIS("l.bf %d\n", N26);
746 gen_jump(dc, N26, 0, op0);
751 case 0x01: /* l.nop */
752 LOG_DIS("l.nop %d\n", I16);
756 gen_illegal_exception(dc);
761 case 0x11: /* l.jr */
762 LOG_DIS("l.jr r%d\n", rb);
763 gen_jump(dc, 0, rb, op0);
766 case 0x12: /* l.jalr */
767 LOG_DIS("l.jalr r%d\n", rb);
768 gen_jump(dc, 0, rb, op0);
771 case 0x13: /* l.maci */
772 LOG_DIS("l.maci %d, r%d, %d\n", I5, ra, I11);
/* (machi:maclo) += ra * tmp as a 64-bit accumulate */
774 TCGv_i64 t1 = tcg_temp_new_i64();
775 TCGv_i64 t2 = tcg_temp_new_i64();
776 TCGv_i32 dst = tcg_temp_new_i32();
777 TCGv ttmp = tcg_const_tl(tmp);
778 tcg_gen_mul_tl(dst, cpu_R[ra], ttmp);
779 tcg_gen_ext_i32_i64(t1, dst);
780 tcg_gen_concat_i32_i64(t2, maclo, machi);
781 tcg_gen_add_i64(t2, t2, t1);
782 tcg_gen_extrl_i64_i32(maclo, t2);
783 tcg_gen_shri_i64(t2, t2, 32);
784 tcg_gen_extrl_i64_i32(machi, t2);
/* NOTE(review): ttmp appears to be freed in a missing line (786) —
 * confirm tcg_temp_free(ttmp) exists in the full source. */
785 tcg_temp_free_i32(dst);
787 tcg_temp_free_i64(t1);
788 tcg_temp_free_i64(t2);
792 case 0x09: /* l.rfe */
/* privileged: illegal from user mode (softmmu) / always (user-only) */
795 #if defined(CONFIG_USER_ONLY)
798 if (dc->mem_idx == MMU_USER_IDX) {
799 gen_illegal_exception(dc);
802 gen_helper_rfe(cpu_env);
803 dc->is_jmp = DISAS_UPDATE;
808 case 0x1c: /* l.cust1 */
809 LOG_DIS("l.cust1\n");
812 case 0x1d: /* l.cust2 */
813 LOG_DIS("l.cust2\n");
816 case 0x1e: /* l.cust3 */
817 LOG_DIS("l.cust3\n");
820 case 0x1f: /* l.cust4 */
821 LOG_DIS("l.cust4\n");
824 case 0x3c: /* l.cust5 */
825 LOG_DIS("l.cust5 r%d, r%d, r%d, %d, %d\n", rd, ra, rb, L6, K5);
828 case 0x3d: /* l.cust6 */
829 LOG_DIS("l.cust6\n");
832 case 0x3e: /* l.cust7 */
833 LOG_DIS("l.cust7\n");
836 case 0x3f: /* l.cust8 */
837 LOG_DIS("l.cust8\n");
840 /* not used yet, open it when we need or64. */
841 /*#ifdef TARGET_OPENRISC64
843 LOG_DIS("l.ld r%d, r%d, %d\n", rd, ra, I16);
/* load group: each case sets `mop` (missing lines) then falls into the
 * shared ld sequence below */
849 case 0x21: /* l.lwz */
850 LOG_DIS("l.lwz r%d, r%d, %d\n", rd, ra, I16);
854 case 0x22: /* l.lws */
855 LOG_DIS("l.lws r%d, r%d, %d\n", rd, ra, I16);
859 case 0x23: /* l.lbz */
860 LOG_DIS("l.lbz r%d, r%d, %d\n", rd, ra, I16);
864 case 0x24: /* l.lbs */
865 LOG_DIS("l.lbs r%d, r%d, %d\n", rd, ra, I16);
869 case 0x25: /* l.lhz */
870 LOG_DIS("l.lhz r%d, r%d, %d\n", rd, ra, I16);
874 case 0x26: /* l.lhs */
875 LOG_DIS("l.lhs r%d, r%d, %d\n", rd, ra, I16);
881 TCGv t0 = tcg_temp_new();
882 tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
883 tcg_gen_qemu_ld_tl(cpu_R[rd], t0, dc->mem_idx, mop);
888 case 0x27: /* l.addi */
889 LOG_DIS("l.addi r%d, r%d, %d\n", rd, ra, I16);
/* I16 == 0 fast path: plain register move, no overflow tracking */
892 tcg_gen_mov_tl(cpu_R[rd], cpu_R[ra]);
894 TCGLabel *lab = gen_new_label();
895 TCGv_i64 ta = tcg_temp_new_i64();
896 TCGv_i64 td = tcg_temp_local_new_i64();
897 TCGv_i32 res = tcg_temp_local_new_i32();
898 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
899 tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
900 tcg_gen_addi_i64(td, ta, sign_extend(I16, 16));
901 tcg_gen_extrl_i64_i32(res, td);
902 tcg_gen_shri_i64(td, td, 32);
903 tcg_gen_andi_i64(td, td, 0x3);
904 /* Jump to lab when no overflow. */
905 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
906 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
907 tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
908 tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
909 tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
910 gen_exception(dc, EXCP_RANGE);
912 tcg_gen_mov_i32(cpu_R[rd], res);
913 tcg_temp_free_i64(ta);
914 tcg_temp_free_i64(td);
915 tcg_temp_free_i32(res);
916 tcg_temp_free_i32(sr_ove);
921 case 0x28: /* l.addic */
922 LOG_DIS("l.addic r%d, r%d, %d\n", rd, ra, I16);
924 TCGLabel *lab = gen_new_label();
925 TCGv_i64 ta = tcg_temp_new_i64();
926 TCGv_i64 td = tcg_temp_local_new_i64();
927 TCGv_i64 tcy = tcg_temp_local_new_i64();
928 TCGv_i32 res = tcg_temp_local_new_i32();
929 TCGv_i32 sr_cy = tcg_temp_local_new_i32();
930 TCGv_i32 sr_ove = tcg_temp_local_new_i32();
931 tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
/* carry-in: SR_CY at bit 10, normalized to 0/1 */
932 tcg_gen_andi_i32(sr_cy, cpu_sr, SR_CY);
933 tcg_gen_shri_i32(sr_cy, sr_cy, 10);
934 tcg_gen_extu_i32_i64(tcy, sr_cy);
935 tcg_gen_addi_i64(td, ta, sign_extend(I16, 16));
936 tcg_gen_add_i64(td, td, tcy);
937 tcg_gen_extrl_i64_i32(res, td);
938 tcg_gen_shri_i64(td, td, 32);
939 tcg_gen_andi_i64(td, td, 0x3);
940 /* Jump to lab when no overflow. */
941 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
942 tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
943 tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
944 tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
945 tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
946 gen_exception(dc, EXCP_RANGE);
948 tcg_gen_mov_i32(cpu_R[rd], res);
949 tcg_temp_free_i64(ta);
950 tcg_temp_free_i64(td);
951 tcg_temp_free_i64(tcy);
952 tcg_temp_free_i32(res);
953 tcg_temp_free_i32(sr_cy);
954 tcg_temp_free_i32(sr_ove);
958 case 0x29: /* l.andi */
959 LOG_DIS("l.andi r%d, r%d, %d\n", rd, ra, I16);
960 tcg_gen_andi_tl(cpu_R[rd], cpu_R[ra], zero_extend(I16, 16));
963 case 0x2a: /* l.ori */
964 LOG_DIS("l.ori r%d, r%d, %d\n", rd, ra, I16);
965 tcg_gen_ori_tl(cpu_R[rd], cpu_R[ra], zero_extend(I16, 16));
968 case 0x2b: /* l.xori */
969 LOG_DIS("l.xori r%d, r%d, %d\n", rd, ra, I16);
/* note: xori sign-extends its immediate (per arch), unlike andi/ori */
970 tcg_gen_xori_tl(cpu_R[rd], cpu_R[ra], sign_extend(I16, 16));
973 case 0x2c: /* l.muli */
974 LOG_DIS("l.muli r%d, r%d, %d\n", rd, ra, I16);
975 if (ra != 0 && I16 != 0) {
976 TCGv_i32 im = tcg_const_i32(I16);
977 gen_helper_mul32(cpu_R[rd], cpu_env, cpu_R[ra], im);
978 tcg_temp_free_i32(im);
980 tcg_gen_movi_tl(cpu_R[rd], 0x0);
984 case 0x2d: /* l.mfspr */
985 LOG_DIS("l.mfspr r%d, r%d, %d\n", rd, ra, I16);
987 #if defined(CONFIG_USER_ONLY)
990 TCGv_i32 ti = tcg_const_i32(I16);
991 if (dc->mem_idx == MMU_USER_IDX) {
992 gen_illegal_exception(dc);
995 gen_helper_mfspr(cpu_R[rd], cpu_env, cpu_R[rd], cpu_R[ra], ti);
996 tcg_temp_free_i32(ti);
1001 case 0x30: /* l.mtspr */
1002 LOG_DIS("l.mtspr %d, r%d, r%d, %d\n", I5, ra, rb, I11);
1004 #if defined(CONFIG_USER_ONLY)
1007 TCGv_i32 im = tcg_const_i32(tmp);
1008 if (dc->mem_idx == MMU_USER_IDX) {
1009 gen_illegal_exception(dc);
1012 gen_helper_mtspr(cpu_env, cpu_R[ra], cpu_R[rb], im);
1013 tcg_temp_free_i32(im);
1018 /* not used yet, open it when we need or64. */
1019 /*#ifdef TARGET_OPENRISC64
1021 LOG_DIS("l.sd %d, r%d, r%d, %d\n", I5, ra, rb, I11);
/* store group: each case sets `mop` (missing lines) then falls into the
 * shared st sequence below; address = ra + sext(I5:I11) */
1027 case 0x35: /* l.sw */
1028 LOG_DIS("l.sw %d, r%d, r%d, %d\n", I5, ra, rb, I11);
1032 case 0x36: /* l.sb */
1033 LOG_DIS("l.sb %d, r%d, r%d, %d\n", I5, ra, rb, I11);
1037 case 0x37: /* l.sh */
1038 LOG_DIS("l.sh %d, r%d, r%d, %d\n", I5, ra, rb, I11);
1044 TCGv t0 = tcg_temp_new();
1045 tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(tmp, 16));
1046 tcg_gen_qemu_st_tl(cpu_R[rb], t0, dc->mem_idx, mop);
1052 gen_illegal_exception(dc);
/* Decode the MAC-unit ops: l.mac (multiply-accumulate into machi:maclo)
 * and l.msb (multiply-subtract).  Both widen the 32x32 product to 64 bits
 * and fold it into the concatenated accumulator.
 * NOTE(review): truncated excerpt — opening brace, `uint32_t op0, ra, rb;`
 * declaration, `switch (op0)` header, `break;`s, block braces and the
 * default label/closing braces are missing; leading numbers are listing
 * artifacts, not code.
 */
1057 static void dec_mac(DisasContext *dc, uint32_t insn)
1061 op0 = extract32(insn, 0, 4);
1062 ra = extract32(insn, 16, 5);
1063 rb = extract32(insn, 11, 5);
1066 case 0x0001: /* l.mac */
1067 LOG_DIS("l.mac r%d, r%d\n", ra, rb);
1069 TCGv_i32 t0 = tcg_temp_new_i32();
1070 TCGv_i64 t1 = tcg_temp_new_i64();
1071 TCGv_i64 t2 = tcg_temp_new_i64();
1072 tcg_gen_mul_tl(t0, cpu_R[ra], cpu_R[rb]);
1073 tcg_gen_ext_i32_i64(t1, t0);
/* accumulator = (machi << 32) | maclo */
1074 tcg_gen_concat_i32_i64(t2, maclo, machi);
1075 tcg_gen_add_i64(t2, t2, t1);
1076 tcg_gen_extrl_i64_i32(maclo, t2);
1077 tcg_gen_shri_i64(t2, t2, 32);
1078 tcg_gen_extrl_i64_i32(machi, t2);
1079 tcg_temp_free_i32(t0);
1080 tcg_temp_free_i64(t1);
1081 tcg_temp_free_i64(t2);
1085 case 0x0002: /* l.msb */
1086 LOG_DIS("l.msb r%d, r%d\n", ra, rb);
1088 TCGv_i32 t0 = tcg_temp_new_i32();
1089 TCGv_i64 t1 = tcg_temp_new_i64();
1090 TCGv_i64 t2 = tcg_temp_new_i64();
1091 tcg_gen_mul_tl(t0, cpu_R[ra], cpu_R[rb]);
1092 tcg_gen_ext_i32_i64(t1, t0);
1093 tcg_gen_concat_i32_i64(t2, maclo, machi);
/* same as l.mac but subtracts the product */
1094 tcg_gen_sub_i64(t2, t2, t1);
1095 tcg_gen_extrl_i64_i32(maclo, t2);
1096 tcg_gen_shri_i64(t2, t2, 32);
1097 tcg_gen_extrl_i64_i32(machi, t2);
1098 tcg_temp_free_i32(t0);
1099 tcg_temp_free_i64(t1);
1100 tcg_temp_free_i64(t2);
1105 gen_illegal_exception(dc);
1110 static void dec_logic(DisasContext *dc, uint32_t insn)
1113 uint32_t rd, ra, L6;
1114 op0 = extract32(insn, 6, 2);
1115 rd = extract32(insn, 21, 5);
1116 ra = extract32(insn, 16, 5);
1117 L6 = extract32(insn, 0, 6);
1120 case 0x00: /* l.slli */
1121 LOG_DIS("l.slli r%d, r%d, %d\n", rd, ra, L6);
1122 tcg_gen_shli_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
1125 case 0x01: /* l.srli */
1126 LOG_DIS("l.srli r%d, r%d, %d\n", rd, ra, L6);
1127 tcg_gen_shri_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
1130 case 0x02: /* l.srai */
1131 LOG_DIS("l.srai r%d, r%d, %d\n", rd, ra, L6);
1132 tcg_gen_sari_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f)); break;
1134 case 0x03: /* l.rori */
1135 LOG_DIS("l.rori r%d, r%d, %d\n", rd, ra, L6);
1136 tcg_gen_rotri_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
1140 gen_illegal_exception(dc);
1145 static void dec_M(DisasContext *dc, uint32_t insn)
1150 op0 = extract32(insn, 16, 1);
1151 rd = extract32(insn, 21, 5);
1152 K16 = extract32(insn, 0, 16);
1155 case 0x0: /* l.movhi */
1156 LOG_DIS("l.movhi r%d, %d\n", rd, K16);
1157 tcg_gen_movi_tl(cpu_R[rd], (K16 << 16));
1160 case 0x1: /* l.macrc */
1161 LOG_DIS("l.macrc r%d\n", rd);
1162 tcg_gen_mov_tl(cpu_R[rd], maclo);
1163 tcg_gen_movi_tl(maclo, 0x0);
1164 tcg_gen_movi_tl(machi, 0x0);
1168 gen_illegal_exception(dc);
1173 static void dec_comp(DisasContext *dc, uint32_t insn)
1178 op0 = extract32(insn, 21, 5);
1179 ra = extract32(insn, 16, 5);
1180 rb = extract32(insn, 11, 5);
1182 tcg_gen_movi_i32(env_btaken, 0x0);
1183 /* unsigned integers */
1184 tcg_gen_ext32u_tl(cpu_R[ra], cpu_R[ra]);
1185 tcg_gen_ext32u_tl(cpu_R[rb], cpu_R[rb]);
1188 case 0x0: /* l.sfeq */
1189 LOG_DIS("l.sfeq r%d, r%d\n", ra, rb);
1190 tcg_gen_setcond_tl(TCG_COND_EQ, env_btaken, cpu_R[ra], cpu_R[rb]);
1193 case 0x1: /* l.sfne */
1194 LOG_DIS("l.sfne r%d, r%d\n", ra, rb);
1195 tcg_gen_setcond_tl(TCG_COND_NE, env_btaken, cpu_R[ra], cpu_R[rb]);
1198 case 0x2: /* l.sfgtu */
1199 LOG_DIS("l.sfgtu r%d, r%d\n", ra, rb);
1200 tcg_gen_setcond_tl(TCG_COND_GTU, env_btaken, cpu_R[ra], cpu_R[rb]);
1203 case 0x3: /* l.sfgeu */
1204 LOG_DIS("l.sfgeu r%d, r%d\n", ra, rb);
1205 tcg_gen_setcond_tl(TCG_COND_GEU, env_btaken, cpu_R[ra], cpu_R[rb]);
1208 case 0x4: /* l.sfltu */
1209 LOG_DIS("l.sfltu r%d, r%d\n", ra, rb);
1210 tcg_gen_setcond_tl(TCG_COND_LTU, env_btaken, cpu_R[ra], cpu_R[rb]);
1213 case 0x5: /* l.sfleu */
1214 LOG_DIS("l.sfleu r%d, r%d\n", ra, rb);
1215 tcg_gen_setcond_tl(TCG_COND_LEU, env_btaken, cpu_R[ra], cpu_R[rb]);
1218 case 0xa: /* l.sfgts */
1219 LOG_DIS("l.sfgts r%d, r%d\n", ra, rb);
1220 tcg_gen_setcond_tl(TCG_COND_GT, env_btaken, cpu_R[ra], cpu_R[rb]);
1223 case 0xb: /* l.sfges */
1224 LOG_DIS("l.sfges r%d, r%d\n", ra, rb);
1225 tcg_gen_setcond_tl(TCG_COND_GE, env_btaken, cpu_R[ra], cpu_R[rb]);
1228 case 0xc: /* l.sflts */
1229 LOG_DIS("l.sflts r%d, r%d\n", ra, rb);
1230 tcg_gen_setcond_tl(TCG_COND_LT, env_btaken, cpu_R[ra], cpu_R[rb]);
1233 case 0xd: /* l.sfles */
1234 LOG_DIS("l.sfles r%d, r%d\n", ra, rb);
1235 tcg_gen_setcond_tl(TCG_COND_LE, env_btaken, cpu_R[ra], cpu_R[rb]);
1239 gen_illegal_exception(dc);
1245 static void dec_compi(DisasContext *dc, uint32_t insn)
1250 op0 = extract32(insn, 21, 5);
1251 ra = extract32(insn, 16, 5);
1252 I16 = extract32(insn, 0, 16);
1254 tcg_gen_movi_i32(env_btaken, 0x0);
1255 I16 = sign_extend(I16, 16);
1258 case 0x0: /* l.sfeqi */
1259 LOG_DIS("l.sfeqi r%d, %d\n", ra, I16);
1260 tcg_gen_setcondi_tl(TCG_COND_EQ, env_btaken, cpu_R[ra], I16);
1263 case 0x1: /* l.sfnei */
1264 LOG_DIS("l.sfnei r%d, %d\n", ra, I16);
1265 tcg_gen_setcondi_tl(TCG_COND_NE, env_btaken, cpu_R[ra], I16);
1268 case 0x2: /* l.sfgtui */
1269 LOG_DIS("l.sfgtui r%d, %d\n", ra, I16);
1270 tcg_gen_setcondi_tl(TCG_COND_GTU, env_btaken, cpu_R[ra], I16);
1273 case 0x3: /* l.sfgeui */
1274 LOG_DIS("l.sfgeui r%d, %d\n", ra, I16);
1275 tcg_gen_setcondi_tl(TCG_COND_GEU, env_btaken, cpu_R[ra], I16);
1278 case 0x4: /* l.sfltui */
1279 LOG_DIS("l.sfltui r%d, %d\n", ra, I16);
1280 tcg_gen_setcondi_tl(TCG_COND_LTU, env_btaken, cpu_R[ra], I16);
1283 case 0x5: /* l.sfleui */
1284 LOG_DIS("l.sfleui r%d, %d\n", ra, I16);
1285 tcg_gen_setcondi_tl(TCG_COND_LEU, env_btaken, cpu_R[ra], I16);
1288 case 0xa: /* l.sfgtsi */
1289 LOG_DIS("l.sfgtsi r%d, %d\n", ra, I16);
1290 tcg_gen_setcondi_tl(TCG_COND_GT, env_btaken, cpu_R[ra], I16);
1293 case 0xb: /* l.sfgesi */
1294 LOG_DIS("l.sfgesi r%d, %d\n", ra, I16);
1295 tcg_gen_setcondi_tl(TCG_COND_GE, env_btaken, cpu_R[ra], I16);
1298 case 0xc: /* l.sfltsi */
1299 LOG_DIS("l.sfltsi r%d, %d\n", ra, I16);
1300 tcg_gen_setcondi_tl(TCG_COND_LT, env_btaken, cpu_R[ra], I16);
1303 case 0xd: /* l.sflesi */
1304 LOG_DIS("l.sflesi r%d, %d\n", ra, I16);
1305 tcg_gen_setcondi_tl(TCG_COND_LE, env_btaken, cpu_R[ra], I16);
1309 gen_illegal_exception(dc);
/* Decode the system group: l.sys (syscall), l.trap, and the l.csync /
 * l.msync / l.psync synchronization ops, all privileged except l.sys.
 * NOTE(review): truncated excerpt — opening brace, op0/K16 declarations,
 * `switch (op0)` header, `break;`s, the `#else`/`#endif` halves of each
 * CONFIG_USER_ONLY conditional (user-only builds presumably reject these
 * outright — confirm), and closing braces are missing; leading numbers
 * are listing artifacts, not code.
 */
1315 static void dec_sys(DisasContext *dc, uint32_t insn)
1318 #ifdef OPENRISC_DISAS
1321 op0 = extract32(insn, 16, 10);
1322 #ifdef OPENRISC_DISAS
1323 K16 = extract32(insn, 0, 16);
1327 case 0x000: /* l.sys */
1328 LOG_DIS("l.sys %d\n", K16);
/* latch PC so the handler sees the syscall site, then raise */
1329 tcg_gen_movi_tl(cpu_pc, dc->pc);
1330 gen_exception(dc, EXCP_SYSCALL);
1331 dc->is_jmp = DISAS_UPDATE;
1334 case 0x100: /* l.trap */
1335 LOG_DIS("l.trap %d\n", K16);
1336 #if defined(CONFIG_USER_ONLY)
1339 if (dc->mem_idx == MMU_USER_IDX) {
1340 gen_illegal_exception(dc);
1343 tcg_gen_movi_tl(cpu_pc, dc->pc);
1344 gen_exception(dc, EXCP_TRAP);
1348 case 0x300: /* l.csync */
1349 LOG_DIS("l.csync\n");
1350 #if defined(CONFIG_USER_ONLY)
1353 if (dc->mem_idx == MMU_USER_IDX) {
1354 gen_illegal_exception(dc);
1360 case 0x200: /* l.msync */
1361 LOG_DIS("l.msync\n");
1362 #if defined(CONFIG_USER_ONLY)
1365 if (dc->mem_idx == MMU_USER_IDX) {
1366 gen_illegal_exception(dc);
1372 case 0x270: /* l.psync */
1373 LOG_DIS("l.psync\n");
1374 #if defined(CONFIG_USER_ONLY)
1377 if (dc->mem_idx == MMU_USER_IDX) {
1378 gen_illegal_exception(dc);
1385 gen_illegal_exception(dc);
1390 static void dec_float(DisasContext *dc, uint32_t insn)
/*
 * Interior of the floating-point decoder (lf.* instruction family).
 * op0 is the minor opcode from insn[7:0]; ra/rb are the source GPR
 * indices and rd the destination GPR index.  Single-precision ops
 * call out to float helpers that take cpu_env so they can update
 * FPCSR; the lf.sfXX.s compare ops write their boolean result into
 * env_btaken for a following l.bf/l.bnf.
 * NOTE(review): the enclosing switch header and per-case breaks are
 * not visible in this chunk.
 */
1393 uint32_t ra, rb, rd;
1394 op0 = extract32(insn, 0, 8);
1395 ra = extract32(insn, 16, 5);
1396 rb = extract32(insn, 11, 5);
1397 rd = extract32(insn, 21, 5);
1400 case 0x00: /* lf.add.s */
1401 LOG_DIS("lf.add.s r%d, r%d, r%d\n", rd, ra, rb);
1402 gen_helper_float_add_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1405 case 0x01: /* lf.sub.s */
1406 LOG_DIS("lf.sub.s r%d, r%d, r%d\n", rd, ra, rb);
1407 gen_helper_float_sub_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1411 case 0x02: /* lf.mul.s */
1412 LOG_DIS("lf.mul.s r%d, r%d, r%d\n", rd, ra, rb);
/* NOTE(review): this tests the register *indices*, not the runtime
 * register values.  Since r0 is hardwired to zero it special-cases a
 * multiply involving r0, but short-circuiting to +0 and setting
 * FPCSR_ZF is not IEEE-correct when the other operand is NaN or Inf
 * -- confirm this is intentional. */
1413 if (ra != 0 && rb != 0) {
1414 gen_helper_float_mul_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1416 tcg_gen_ori_tl(fpcsr, fpcsr, FPCSR_ZF);
1417 tcg_gen_movi_i32(cpu_R[rd], 0x0);
1421 case 0x03: /* lf.div.s */
1422 LOG_DIS("lf.div.s r%d, r%d, r%d\n", rd, ra, rb);
1423 gen_helper_float_div_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1426 case 0x04: /* lf.itof.s */
1427 LOG_DIS("lf.itof r%d, r%d\n", rd, ra);
1428 gen_helper_itofs(cpu_R[rd], cpu_env, cpu_R[ra]); /* int -> float */
1431 case 0x05: /* lf.ftoi.s */
1432 LOG_DIS("lf.ftoi r%d, r%d\n", rd, ra);
1433 gen_helper_ftois(cpu_R[rd], cpu_env, cpu_R[ra]); /* float -> int */
1436 case 0x06: /* lf.rem.s */
1437 LOG_DIS("lf.rem.s r%d, r%d, r%d\n", rd, ra, rb);
1438 gen_helper_float_rem_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1441 case 0x07: /* lf.madd.s */
1442 LOG_DIS("lf.madd.s r%d, r%d, r%d\n", rd, ra, rb);
1443 gen_helper_float_muladd_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
/* Set-flag compares: result goes to env_btaken, consumed by the next
 * conditional branch.  rd is unused for these encodings. */
1446 case 0x08: /* lf.sfeq.s */
1447 LOG_DIS("lf.sfeq.s r%d, r%d\n", ra, rb);
1448 gen_helper_float_eq_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1451 case 0x09: /* lf.sfne.s */
1452 LOG_DIS("lf.sfne.s r%d, r%d\n", ra, rb);
1453 gen_helper_float_ne_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1456 case 0x0a: /* lf.sfgt.s */
1457 LOG_DIS("lf.sfgt.s r%d, r%d\n", ra, rb);
1458 gen_helper_float_gt_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1461 case 0x0b: /* lf.sfge.s */
1462 LOG_DIS("lf.sfge.s r%d, r%d\n", ra, rb);
1463 gen_helper_float_ge_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1466 case 0x0c: /* lf.sflt.s */
1467 LOG_DIS("lf.sflt.s r%d, r%d\n", ra, rb);
1468 gen_helper_float_lt_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1471 case 0x0d: /* lf.sfle.s */
1472 LOG_DIS("lf.sfle.s r%d, r%d\n", ra, rb);
1473 gen_helper_float_le_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1476 /* not used yet, open it when we need or64. */
1477 /*#ifdef TARGET_OPENRISC64
1479 LOG_DIS("lf.add.d r%d, r%d, r%d\n", rd, ra, rb);
1481 gen_helper_float_add_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1485 LOG_DIS("lf.sub.d r%d, r%d, r%d\n", rd, ra, rb);
1487 gen_helper_float_sub_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1491 LOG_DIS("lf.mul.d r%d, r%d, r%d\n", rd, ra, rb);
1493 if (ra != 0 && rb != 0) {
1494 gen_helper_float_mul_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1496 tcg_gen_ori_tl(fpcsr, fpcsr, FPCSR_ZF);
1497 tcg_gen_movi_i64(cpu_R[rd], 0x0);
1502 LOG_DIS("lf.div.d r%d, r%d, r%d\n", rd, ra, rb);
1504 gen_helper_float_div_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1507 case 0x14: lf.itof.d
1508 LOG_DIS("lf.itof r%d, r%d\n", rd, ra);
1510 gen_helper_itofd(cpu_R[rd], cpu_env, cpu_R[ra]);
1513 case 0x15: lf.ftoi.d
1514 LOG_DIS("lf.ftoi r%d, r%d\n", rd, ra);
1516 gen_helper_ftoid(cpu_R[rd], cpu_env, cpu_R[ra]);
1520 LOG_DIS("lf.rem.d r%d, r%d, r%d\n", rd, ra, rb);
1522 gen_helper_float_rem_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1525 case 0x17: lf.madd.d
1526 LOG_DIS("lf.madd.d r%d, r%d, r%d\n", rd, ra, rb);
1528 gen_helper_float_muladd_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
1531 case 0x18: lf.sfeq.d
1532 LOG_DIS("lf.sfeq.d r%d, r%d\n", ra, rb);
1534 gen_helper_float_eq_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1537 case 0x1a: lf.sfgt.d
1538 LOG_DIS("lf.sfgt.d r%d, r%d\n", ra, rb);
1540 gen_helper_float_gt_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1543 case 0x1b: lf.sfge.d
1544 LOG_DIS("lf.sfge.d r%d, r%d\n", ra, rb);
1546 gen_helper_float_ge_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1549 case 0x19: lf.sfne.d
1550 LOG_DIS("lf.sfne.d r%d, r%d\n", ra, rb);
1552 gen_helper_float_ne_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1555 case 0x1c: lf.sflt.d
1556 LOG_DIS("lf.sflt.d r%d, r%d\n", ra, rb);
1558 gen_helper_float_lt_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1561 case 0x1d: lf.sfle.d
1562 LOG_DIS("lf.sfle.d r%d, r%d\n", ra, rb);
1564 gen_helper_float_le_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
1569 gen_illegal_exception(dc);
/*
 * Fetch the 32-bit instruction word at dc->pc from the guest code
 * stream and dispatch to a per-group decoder keyed on the major
 * opcode in insn[31:26].  Only a subset of the dispatch cases is
 * visible in this chunk.
 */
1575 static void disas_openrisc_insn(DisasContext *dc, OpenRISCCPU *cpu)
1579     insn = cpu_ldl_code(&cpu->env, dc->pc); /* guest code fetch */
1580     op0 = extract32(insn, 26, 6); /* major opcode field */
1592         dec_logic(dc, insn);
1596         dec_compi(dc, insn);
1604         dec_float(dc, insn);
/*
 * If a guest-debug breakpoint is registered at the current translation
 * pc, emit code that syncs cpu_pc and raises EXCP_DEBUG, then mark the
 * TB as finished (DISAS_UPDATE) so translation stops here.
 */
1621 static void check_breakpoint(OpenRISCCPU *cpu, DisasContext *dc)
1623     CPUState *cs = CPU(cpu);
1626     if (unlikely(!QTAILQ_EMPTY(&cs->breakpoints))) { /* fast path: no bps */
1627         QTAILQ_FOREACH(bp, &cs->breakpoints, entry) {
1628             if (bp->pc == dc->pc) {
1629                 tcg_gen_movi_tl(cpu_pc, dc->pc); /* sync pc for debugger */
1630                 gen_exception(dc, EXCP_DEBUG);
1631                 dc->is_jmp = DISAS_UPDATE;
/*
 * Core translation loop: decode guest instructions starting at
 * tb->pc and emit TCG ops until the TB must end (branch taken,
 * op buffer full, singlestep, page boundary crossed, or the icount
 * budget in max_insns is exhausted).  Also fills the gen_opc_*
 * side tables used to map host pc back to guest pc.
 * NOTE(review): several lines of this function are elided in this
 * chunk (parameter list tail, loop braces, some locals).
 */
1637 static inline void gen_intermediate_code_internal(OpenRISCCPU *cpu,
1638                                                   TranslationBlock *tb,
1641     CPUState *cs = CPU(cpu);
1642     struct DisasContext ctx, *dc = &ctx;
1645     uint32_t next_page_start;
1652     dc->is_jmp = DISAS_NEXT;
1655     dc->flags = cpu->env.cpucfgr;
1656     dc->mem_idx = cpu_mmu_index(&cpu->env);
1657     dc->synced_flags = dc->tb_flags = tb->flags;
1658     dc->delayed_branch = !!(dc->tb_flags & D_FLAG); /* resuming in a delay slot? */
1659     dc->singlestep_enabled = cs->singlestep_enabled;
1660     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
1661         qemu_log("-----------------------------------------\n");
1662         log_cpu_state(CPU(cpu), 0);
/* TBs never span a guest page boundary. */
1665     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
1668     max_insns = tb->cflags & CF_COUNT_MASK;
1670     if (max_insns == 0) {
1671         max_insns = CF_COUNT_MASK; /* effectively unlimited */
1677         check_breakpoint(cpu, dc);
/* Pad the opc side tables up to the current op index, then record
 * the guest pc / icount for this instruction (search_pc mode). */
1679         j = tcg_op_buf_count();
1683                 tcg_ctx.gen_opc_instr_start[k++] = 0;
1686             tcg_ctx.gen_opc_pc[k] = dc->pc;
1687             tcg_ctx.gen_opc_instr_start[k] = 1;
1688             tcg_ctx.gen_opc_icount[k] = num_insns;
1691         if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
1692             tcg_gen_debug_insn_start(dc->pc);
/* Last instruction of an I/O TB: presumably starts icount io mode
 * here -- the gen_io_start() call is elided from this chunk. */
1695         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
/* Maintain previous/next pc both in the context and in the CPU
 * globals before decoding, so helpers see a consistent state. */
1698         dc->ppc = dc->pc - 4;
1699         dc->npc = dc->pc + 4;
1700         tcg_gen_movi_tl(cpu_ppc, dc->ppc);
1701         tcg_gen_movi_tl(cpu_npc, dc->npc);
1702         disas_openrisc_insn(dc, cpu);
/* Delay-slot bookkeeping: after translating the slot instruction,
 * redirect pc/npc to the saved branch target in jmp_pc and end
 * the TB with DISAS_JUMP. */
1706         if (dc->delayed_branch) {
1707             dc->delayed_branch--;
1708             if (!dc->delayed_branch) {
1709                 dc->tb_flags &= ~D_FLAG;
1711                 tcg_gen_mov_tl(cpu_pc, jmp_pc);
1712                 tcg_gen_mov_tl(cpu_npc, jmp_pc);
1713                 tcg_gen_movi_tl(jmp_pc, 0); /* consumed; clear target */
1715                 dc->is_jmp = DISAS_JUMP;
/* Loop termination: explicit jump, op buffer full, singlestep,
 * page boundary, or instruction budget reached. */
1719     } while (!dc->is_jmp
1720              && !tcg_op_buf_full()
1721              && !cs->singlestep_enabled
1723              && (dc->pc < next_page_start)
1724              && num_insns < max_insns);
1726     if (tb->cflags & CF_LAST_IO) {
1729     if (dc->is_jmp == DISAS_NEXT) {
1730         dc->is_jmp = DISAS_UPDATE;
1731         tcg_gen_movi_tl(cpu_pc, dc->pc); /* fell off the end: sync pc */
1733     if (unlikely(cs->singlestep_enabled)) {
1734         if (dc->is_jmp == DISAS_NEXT) {
1735             tcg_gen_movi_tl(cpu_pc, dc->pc);
1737         gen_exception(dc, EXCP_DEBUG); /* hand control to the debugger */
1739     switch (dc->is_jmp) {
1741         gen_goto_tb(dc, 0, dc->pc); /* direct chaining to the next TB */
1747         /* indicate that the hash table must be used
1748            to find the next TB */
1752         /* nothing more to generate */
1757     gen_tb_end(tb, num_insns);
/* Zero-pad the tail of the opc side tables (search_pc mode). */
1760         j = tcg_op_buf_count();
1763             tcg_ctx.gen_opc_instr_start[k++] = 0;
1766         tb->size = dc->pc - pc_start;
1767         tb->icount = num_insns;
1771     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
1773         log_target_disas(cs, pc_start, dc->pc - pc_start, 0);
1774         qemu_log("\nisize=%d osize=%d\n",
1775                  dc->pc - pc_start, tcg_op_buf_count());
/* Public entry point: translate one TB; the final 0 argument is
 * presumably the search_pc flag (off here) -- see the _pc variant. */
1780 void gen_intermediate_code(CPUOpenRISCState *env, struct TranslationBlock *tb)
1782     gen_intermediate_code_internal(openrisc_env_get_cpu(env), tb, 0);
/* Variant used when restoring guest state from a host pc: same
 * translation, but with pc-search side tables enabled (final arg 1). */
1785 void gen_intermediate_code_pc(CPUOpenRISCState *env,
1786                               struct TranslationBlock *tb)
1788     gen_intermediate_code_internal(openrisc_env_get_cpu(env), tb, 1);
/*
 * Dump the architectural state (PC and the 32 general-purpose
 * registers, four per output line) through cpu_fprintf, for the
 * monitor's "info registers" and debug logging.
 */
1791 void openrisc_cpu_dump_state(CPUState *cs, FILE *f,
1792                              fprintf_function cpu_fprintf,
1795     OpenRISCCPU *cpu = OPENRISC_CPU(cs);
1796     CPUOpenRISCState *env = &cpu->env;
1799     cpu_fprintf(f, "PC=%08x\n", env->pc);
1800     for (i = 0; i < 32; ++i) {
1801         cpu_fprintf(f, "R%02d=%08x%c", i, env->gpr[i],
1802                     (i % 4) == 3 ? '\n' : ' '); /* newline every 4 regs */
1806 void restore_state_to_opc(CPUOpenRISCState *env, TranslationBlock *tb,
1809 env->pc = tcg_ctx.gen_opc_pc[pc_pos];