2 * CRIS emulation for qemu: main translation routines.
4 * Copyright (c) 2008 AXIS Communications AB
5 * Written by Edgar E. Iglesias.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
23 * The condition code translation is in need of attention.
38 #include "crisv32-decode.h"
39 #include "qemu-common.h"
46 # define LOG_DIS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
48 # define LOG_DIS(...) do { } while (0)
52 #define BUG() (gen_BUG(dc, __FILE__, __LINE__))
53 #define BUG_ON(x) ({if (x) BUG();})
57 /* Used by the decoder. */
58 #define EXTRACT_FIELD(src, start, end) \
59 (((src) >> (start)) & ((1 << ((end) - (start) + 1)) - 1))
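/* For example (illustrative only): EXTRACT_FIELD(0xabcd, 4, 7) selects the
   four bits 7..4, i.e. (0xabcd >> 4) & 0xf, and yields 0xc.  */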
61 #define CC_MASK_NZ 0xc
62 #define CC_MASK_NZV 0xe
63 #define CC_MASK_NZVC 0xf
64 #define CC_MASK_RNZV 0x10e
66 static TCGv_ptr cpu_env;
67 static TCGv cpu_R[16];
68 static TCGv cpu_PR[16];
72 static TCGv cc_result;
77 static TCGv env_btaken;
78 static TCGv env_btarget;
81 #include "gen-icount.h"
83 /* This is the state at translation time. */
84 typedef struct DisasContext {
93 unsigned int zsize, zzsize;
102 int cc_size_uptodate; /* -1 if invalid, otherwise the last written value. */
104 int cc_x_uptodate; /* 1 - ccs, 2 - known | X_FLAG. 0 not uptodate. */
105 int flags_uptodate; /* Whether or not $ccs is up to date. */
106 int flagx_known; /* Whether or not flags_x has the x flag known at
110 int clear_x; /* Clear x after this insn? */
111 int cpustate_changed;
112 unsigned int tb_flags; /* tb dependent flags. */
117 #define JMP_INDIRECT 2
118 int jmp; /* 0=nojmp, 1=direct, 2=indirect. */
123 struct TranslationBlock *tb;
124 int singlestep_enabled;
127 static void gen_BUG(DisasContext *dc, const char *file, int line)
129 printf ("BUG: pc=%x %s %d\n", dc->pc, file, line);
130 qemu_log("BUG: pc=%x %s %d\n", dc->pc, file, line);
131 cpu_abort(dc->env, "%s:%d\n", file, line);
134 static const char *regnames[] =
136 "$r0", "$r1", "$r2", "$r3",
137 "$r4", "$r5", "$r6", "$r7",
138 "$r8", "$r9", "$r10", "$r11",
139 "$r12", "$r13", "$sp", "$acr",
141 static const char *pregnames[] =
143 "$bz", "$vr", "$pid", "$srs",
144 "$wz", "$exs", "$eda", "$mof",
145 "$dz", "$ebp", "$erp", "$srp",
146 "$nrp", "$ccs", "$usp", "$spc",
149 /* We need this table to handle preg-moves with implicit width. */
150 static int preg_sizes[] = {
161 #define t_gen_mov_TN_env(tn, member) \
162 _t_gen_mov_TN_env((tn), offsetof(CPUState, member))
163 #define t_gen_mov_env_TN(member, tn) \
164 _t_gen_mov_env_TN(offsetof(CPUState, member), (tn))
166 static inline void t_gen_mov_TN_reg(TCGv tn, int r)
169 fprintf(stderr, "wrong register read $r%d\n", r);
170 tcg_gen_mov_tl(tn, cpu_R[r]);
172 static inline void t_gen_mov_reg_TN(int r, TCGv tn)
175 fprintf(stderr, "wrong register write $r%d\n", r);
176 tcg_gen_mov_tl(cpu_R[r], tn);
179 static inline void _t_gen_mov_TN_env(TCGv tn, int offset)
181 if (offset > sizeof (CPUState))
182 fprintf(stderr, "wrong load from env from off=%d\n", offset);
183 tcg_gen_ld_tl(tn, cpu_env, offset);
185 static inline void _t_gen_mov_env_TN(int offset, TCGv tn)
187 if (offset > sizeof (CPUState))
188 fprintf(stderr, "wrong store to env at off=%d\n", offset);
189 tcg_gen_st_tl(tn, cpu_env, offset);
192 static inline void t_gen_mov_TN_preg(TCGv tn, int r)
195 fprintf(stderr, "wrong register read $p%d\n", r);
196 if (r == PR_BZ || r == PR_WZ || r == PR_DZ)
197 tcg_gen_movi_tl(tn, 0);
199 tcg_gen_movi_tl(tn, 32);
200 else if (r == PR_EDA) {
201 printf("read from EDA!\n");
202 tcg_gen_mov_tl(tn, cpu_PR[r]);
205 tcg_gen_mov_tl(tn, cpu_PR[r]);
207 static inline void t_gen_mov_preg_TN(DisasContext *dc, int r, TCGv tn)
210 fprintf(stderr, "wrong register write $p%d\n", r);
211 if (r == PR_BZ || r == PR_WZ || r == PR_DZ)
213 else if (r == PR_SRS)
214 tcg_gen_andi_tl(cpu_PR[r], tn, 3);
217 gen_helper_tlb_flush_pid(tn);
218 if (dc->tb_flags & S_FLAG && r == PR_SPC)
219 gen_helper_spc_write(tn);
220 else if (r == PR_CCS)
221 dc->cpustate_changed = 1;
222 tcg_gen_mov_tl(cpu_PR[r], tn);
226 static inline void t_gen_raise_exception(uint32_t index)
228 TCGv_i32 tmp = tcg_const_i32(index);
229 gen_helper_raise_exception(tmp);
230 tcg_temp_free_i32(tmp);
233 static void t_gen_lsl(TCGv d, TCGv a, TCGv b)
238 t_31 = tcg_const_tl(31);
239 tcg_gen_shl_tl(d, a, b);
241 tcg_gen_sub_tl(t0, t_31, b);
242 tcg_gen_sar_tl(t0, t0, t_31);
243 tcg_gen_and_tl(t0, t0, d);
244 tcg_gen_xor_tl(d, d, t0);
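/* Roughly, the branch-free sequence above computes the C equivalent of
   (a sketch, assuming 32-bit values):

       mask = (int32_t)(31 - b) >> 31;   // all ones iff b > 31
       d = (a << b) & ~mask;             // d ^ (d & mask)

   so shift counts above 31 yield zero without taking a branch.  */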
249 static void t_gen_lsr(TCGv d, TCGv a, TCGv b)
254 t_31 = tcg_temp_new();
255 tcg_gen_shr_tl(d, a, b);
257 tcg_gen_movi_tl(t_31, 31);
258 tcg_gen_sub_tl(t0, t_31, b);
259 tcg_gen_sar_tl(t0, t0, t_31);
260 tcg_gen_and_tl(t0, t0, d);
261 tcg_gen_xor_tl(d, d, t0);
266 static void t_gen_asr(TCGv d, TCGv a, TCGv b)
271 t_31 = tcg_temp_new();
272 tcg_gen_sar_tl(d, a, b);
274 tcg_gen_movi_tl(t_31, 31);
275 tcg_gen_sub_tl(t0, t_31, b);
276 tcg_gen_sar_tl(t0, t0, t_31);
277 tcg_gen_or_tl(d, d, t0);
282 /* 64-bit signed mul, lower result in d and upper in d2. */
283 static void t_gen_muls(TCGv d, TCGv d2, TCGv a, TCGv b)
287 t0 = tcg_temp_new_i64();
288 t1 = tcg_temp_new_i64();
290 tcg_gen_ext_i32_i64(t0, a);
291 tcg_gen_ext_i32_i64(t1, b);
292 tcg_gen_mul_i64(t0, t0, t1);
294 tcg_gen_trunc_i64_i32(d, t0);
295 tcg_gen_shri_i64(t0, t0, 32);
296 tcg_gen_trunc_i64_i32(d2, t0);
298 tcg_temp_free_i64(t0);
299 tcg_temp_free_i64(t1);
302 /* 64-bit unsigned mul, lower result in d and upper in d2. */
303 static void t_gen_mulu(TCGv d, TCGv d2, TCGv a, TCGv b)
307 t0 = tcg_temp_new_i64();
308 t1 = tcg_temp_new_i64();
310 tcg_gen_extu_i32_i64(t0, a);
311 tcg_gen_extu_i32_i64(t1, b);
312 tcg_gen_mul_i64(t0, t0, t1);
314 tcg_gen_trunc_i64_i32(d, t0);
315 tcg_gen_shri_i64(t0, t0, 32);
316 tcg_gen_trunc_i64_i32(d2, t0);
318 tcg_temp_free_i64(t0);
319 tcg_temp_free_i64(t1);
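/* Both helpers are plain widening multiplies.  As a C sketch (assuming
   32-bit operands), the signed variant computes

       int64_t p = (int64_t)(int32_t)a * (int32_t)b;
       d  = (uint32_t)p;          // low 32 bits
       d2 = (uint32_t)(p >> 32);  // high 32 bits; the callers put this in $mof

   and the unsigned variant does the same with unsigned 64-bit extensions.  */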
322 static void t_gen_cris_dstep(TCGv d, TCGv a, TCGv b)
326 l1 = gen_new_label();
333 tcg_gen_shli_tl(d, a, 1);
334 tcg_gen_brcond_tl(TCG_COND_LTU, d, b, l1);
335 tcg_gen_sub_tl(d, d, b);
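/* As a C sketch (unsigned 32-bit operands), the division step above is:

       d = a << 1;
       if (d >= b)     // unsigned compare, cf. the LTU brcond above
           d -= b;
   */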
339 /* Extended arithmetic on CRIS. */
340 static inline void t_gen_add_flag(TCGv d, int flag)
345 t_gen_mov_TN_preg(c, PR_CCS);
346 /* Propagate carry into d. */
347 tcg_gen_andi_tl(c, c, 1 << flag);
349 tcg_gen_shri_tl(c, c, flag);
350 tcg_gen_add_tl(d, d, c);
354 static inline void t_gen_addx_carry(DisasContext *dc, TCGv d)
356 if (dc->flagx_known) {
361 t_gen_mov_TN_preg(c, PR_CCS);
362 /* C flag is already at bit 0. */
363 tcg_gen_andi_tl(c, c, C_FLAG);
364 tcg_gen_add_tl(d, d, c);
372 t_gen_mov_TN_preg(x, PR_CCS);
373 tcg_gen_mov_tl(c, x);
375 /* Propagate carry into d if X is set. Branch free. */
376 tcg_gen_andi_tl(c, c, C_FLAG);
377 tcg_gen_andi_tl(x, x, X_FLAG);
378 tcg_gen_shri_tl(x, x, 4);
380 tcg_gen_and_tl(x, x, c);
381 tcg_gen_add_tl(d, d, x);
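/* The branch-free sequence above adds the carry only when X is set.
   As a C sketch (C_FLAG is bit 0 and X_FLAG is bit 4 of $ccs):

       d += (ccs & C_FLAG) & ((ccs & X_FLAG) >> 4);
   */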
387 static inline void t_gen_subx_carry(DisasContext *dc, TCGv d)
389 if (dc->flagx_known) {
394 t_gen_mov_TN_preg(c, PR_CCS);
395 /* C flag is already at bit 0. */
396 tcg_gen_andi_tl(c, c, C_FLAG);
397 tcg_gen_sub_tl(d, d, c);
405 t_gen_mov_TN_preg(x, PR_CCS);
406 tcg_gen_mov_tl(c, x);
408 /* Propagate carry into d if X is set. Branch free. */
409 tcg_gen_andi_tl(c, c, C_FLAG);
410 tcg_gen_andi_tl(x, x, X_FLAG);
411 tcg_gen_shri_tl(x, x, 4);
413 tcg_gen_and_tl(x, x, c);
414 tcg_gen_sub_tl(d, d, x);
420 /* Swap the two bytes within each half word of the s operand.
421 T0 = ((T0 << 8) & 0xff00ff00) | ((T0 >> 8) & 0x00ff00ff) */
422 static inline void t_gen_swapb(TCGv d, TCGv s)
427 org_s = tcg_temp_new();
429 /* d and s may refer to the same object. */
430 tcg_gen_mov_tl(org_s, s);
431 tcg_gen_shli_tl(t, org_s, 8);
432 tcg_gen_andi_tl(d, t, 0xff00ff00);
433 tcg_gen_shri_tl(t, org_s, 8);
434 tcg_gen_andi_tl(t, t, 0x00ff00ff);
435 tcg_gen_or_tl(d, d, t);
437 tcg_temp_free(org_s);
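/* For example, 0x11223344 becomes 0x22114433: each 16-bit half has its two
   bytes exchanged (illustrative value only).  */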
440 /* Swap the halfwords of the s operand. */
441 static inline void t_gen_swapw(TCGv d, TCGv s)
444 /* d and s may refer to the same object. */
446 tcg_gen_mov_tl(t, s);
447 tcg_gen_shli_tl(d, t, 16);
448 tcg_gen_shri_tl(t, t, 16);
449 tcg_gen_or_tl(d, d, t);
453 /* Reverse the bits within each byte.
454 T0 = (((T0 << 7) & 0x80808080) |
455 ((T0 << 5) & 0x40404040) |
456 ((T0 << 3) & 0x20202020) |
457 ((T0 << 1) & 0x10101010) |
458 ((T0 >> 1) & 0x08080808) |
459 ((T0 >> 3) & 0x04040404) |
460 ((T0 >> 5) & 0x02020202) |
461 ((T0 >> 7) & 0x01010101));
463 static inline void t_gen_swapr(TCGv d, TCGv s)
466 int shift; /* LSL when positive, LSR when negative. */
481 /* d and s may refer to the same object. */
483 org_s = tcg_temp_new();
484 tcg_gen_mov_tl(org_s, s);
486 tcg_gen_shli_tl(t, org_s, bitrev[0].shift);
487 tcg_gen_andi_tl(d, t, bitrev[0].mask);
488 for (i = 1; i < ARRAY_SIZE(bitrev); i++) {
489 if (bitrev[i].shift >= 0) {
490 tcg_gen_shli_tl(t, org_s, bitrev[i].shift);
492 tcg_gen_shri_tl(t, org_s, -bitrev[i].shift);
494 tcg_gen_andi_tl(t, t, bitrev[i].mask);
495 tcg_gen_or_tl(d, d, t);
498 tcg_temp_free(org_s);
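/* The loop above applies the shift/mask table from the comment to mirror the
   bit order inside every byte; e.g. 0x12345678 becomes 0x482c6a1e
   (illustrative value only).  */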
501 static void t_gen_cc_jmp(TCGv pc_true, TCGv pc_false)
506 l1 = gen_new_label();
507 btaken = tcg_temp_new();
509 /* Conditional jmp. */
510 tcg_gen_mov_tl(btaken, env_btaken);
511 tcg_gen_mov_tl(env_pc, pc_false);
512 tcg_gen_brcondi_tl(TCG_COND_EQ, btaken, 0, l1);
513 tcg_gen_mov_tl(env_pc, pc_true);
516 tcg_temp_free(btaken);
519 static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
521 TranslationBlock *tb;
523 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
525 tcg_gen_movi_tl(env_pc, dest);
526 tcg_gen_exit_tb((long)tb + n);
528 tcg_gen_movi_tl(env_pc, dest);
533 /* Sign extend at translation time. */
534 static int sign_extend(unsigned int val, unsigned int width)
546 static inline void cris_clear_x_flag(DisasContext *dc)
548 if (dc->flagx_known && dc->flags_x)
549 dc->flags_uptodate = 0;
555 static void cris_flush_cc_state(DisasContext *dc)
557 if (dc->cc_size_uptodate != dc->cc_size) {
558 tcg_gen_movi_tl(cc_size, dc->cc_size);
559 dc->cc_size_uptodate = dc->cc_size;
561 tcg_gen_movi_tl(cc_op, dc->cc_op);
562 tcg_gen_movi_tl(cc_mask, dc->cc_mask);
565 static void cris_evaluate_flags(DisasContext *dc)
567 if (dc->flags_uptodate)
570 cris_flush_cc_state(dc);
575 gen_helper_evaluate_flags_mcp(cpu_PR[PR_CCS],
576 cpu_PR[PR_CCS], cc_src,
580 gen_helper_evaluate_flags_muls(cpu_PR[PR_CCS],
581 cpu_PR[PR_CCS], cc_result,
585 gen_helper_evaluate_flags_mulu(cpu_PR[PR_CCS],
586 cpu_PR[PR_CCS], cc_result,
599 gen_helper_evaluate_flags_move_4(cpu_PR[PR_CCS],
600 cpu_PR[PR_CCS], cc_result);
603 gen_helper_evaluate_flags_move_2(cpu_PR[PR_CCS],
604 cpu_PR[PR_CCS], cc_result);
607 gen_helper_evaluate_flags();
616 if (dc->cc_size == 4)
617 gen_helper_evaluate_flags_sub_4(cpu_PR[PR_CCS],
618 cpu_PR[PR_CCS], cc_src, cc_dest, cc_result);
620 gen_helper_evaluate_flags();
627 gen_helper_evaluate_flags_alu_4(cpu_PR[PR_CCS],
628 cpu_PR[PR_CCS], cc_src, cc_dest, cc_result);
631 gen_helper_evaluate_flags();
637 if (dc->flagx_known) {
639 tcg_gen_ori_tl(cpu_PR[PR_CCS],
640 cpu_PR[PR_CCS], X_FLAG);
642 tcg_gen_andi_tl(cpu_PR[PR_CCS],
643 cpu_PR[PR_CCS], ~X_FLAG);
645 dc->flags_uptodate = 1;
648 static void cris_cc_mask(DisasContext *dc, unsigned int mask)
657 /* Check if we need to evaluate the condition codes due to
659 ovl = (dc->cc_mask ^ mask) & ~mask;
661 /* TODO: optimize this case. It triggers all the time. */
662 cris_evaluate_flags (dc);
668 static void cris_update_cc_op(DisasContext *dc, int op, int size)
672 dc->flags_uptodate = 0;
675 static inline void cris_update_cc_x(DisasContext *dc)
677 /* Save the x flag state at the time of the cc snapshot. */
678 if (dc->flagx_known) {
679 if (dc->cc_x_uptodate == (2 | dc->flags_x))
681 tcg_gen_movi_tl(cc_x, dc->flags_x);
682 dc->cc_x_uptodate = 2 | dc->flags_x;
685 tcg_gen_andi_tl(cc_x, cpu_PR[PR_CCS], X_FLAG);
686 dc->cc_x_uptodate = 1;
690 /* Update cc prior to executing ALU op. Needs source operands untouched. */
691 static void cris_pre_alu_update_cc(DisasContext *dc, int op,
692 TCGv dst, TCGv src, int size)
695 cris_update_cc_op(dc, op, size);
696 tcg_gen_mov_tl(cc_src, src);
705 tcg_gen_mov_tl(cc_dest, dst);
707 cris_update_cc_x(dc);
711 /* Update cc after executing the ALU op. Needs the result. */
712 static inline void cris_update_result(DisasContext *dc, TCGv res)
715 tcg_gen_mov_tl(cc_result, res);
718 /* Emit the ALU operation selected by op; the result lands in dst. */
719 static void cris_alu_op_exec(DisasContext *dc, int op,
720 TCGv dst, TCGv a, TCGv b, int size)
722 /* Emit the ALU insns. */
726 tcg_gen_add_tl(dst, a, b);
727 /* Extended arithmetic. */
728 t_gen_addx_carry(dc, dst);
731 tcg_gen_add_tl(dst, a, b);
732 t_gen_add_flag(dst, 0); /* C_FLAG. */
735 tcg_gen_add_tl(dst, a, b);
736 t_gen_add_flag(dst, 8); /* R_FLAG. */
739 tcg_gen_sub_tl(dst, a, b);
740 /* Extended arithmetic. */
741 t_gen_subx_carry(dc, dst);
744 tcg_gen_mov_tl(dst, b);
747 tcg_gen_or_tl(dst, a, b);
750 tcg_gen_and_tl(dst, a, b);
753 tcg_gen_xor_tl(dst, a, b);
756 t_gen_lsl(dst, a, b);
759 t_gen_lsr(dst, a, b);
762 t_gen_asr(dst, a, b);
765 tcg_gen_neg_tl(dst, b);
766 /* Extended arithmetic. */
767 t_gen_subx_carry(dc, dst);
770 gen_helper_lz(dst, b);
773 t_gen_muls(dst, cpu_PR[PR_MOF], a, b);
776 t_gen_mulu(dst, cpu_PR[PR_MOF], a, b);
779 t_gen_cris_dstep(dst, a, b);
784 l1 = gen_new_label();
785 tcg_gen_mov_tl(dst, a);
786 tcg_gen_brcond_tl(TCG_COND_LEU, a, b, l1);
787 tcg_gen_mov_tl(dst, b);
792 tcg_gen_sub_tl(dst, a, b);
793 /* Extended arithmetic. */
794 t_gen_subx_carry(dc, dst);
797 qemu_log("illegal ALU op.\n");
803 tcg_gen_andi_tl(dst, dst, 0xff);
805 tcg_gen_andi_tl(dst, dst, 0xffff);
808 static void cris_alu(DisasContext *dc, int op,
809 TCGv d, TCGv op_a, TCGv op_b, int size)
816 if (op == CC_OP_CMP) {
817 tmp = tcg_temp_new();
819 } else if (size == 4) {
823 tmp = tcg_temp_new();
826 cris_pre_alu_update_cc(dc, op, op_a, op_b, size);
827 cris_alu_op_exec(dc, op, tmp, op_a, op_b, size);
828 cris_update_result(dc, tmp);
833 tcg_gen_andi_tl(d, d, ~0xff);
835 tcg_gen_andi_tl(d, d, ~0xffff);
836 tcg_gen_or_tl(d, d, tmp);
838 if (!TCGV_EQUAL(tmp, d))
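/* For sub-word sizes the write back above merges the result into the low
   bits of the destination register.  Roughly, in C (sketch):

       if (size == 1)      d = (d & ~0xff)   | (result & 0xff);
       else if (size == 2) d = (d & ~0xffff) | (result & 0xffff);
       else                d = result;
   */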
842 static int arith_cc(DisasContext *dc)
846 case CC_OP_ADDC: return 1;
847 case CC_OP_ADD: return 1;
848 case CC_OP_SUB: return 1;
849 case CC_OP_DSTEP: return 1;
850 case CC_OP_LSL: return 1;
851 case CC_OP_LSR: return 1;
852 case CC_OP_ASR: return 1;
853 case CC_OP_CMP: return 1;
854 case CC_OP_NEG: return 1;
855 case CC_OP_OR: return 1;
856 case CC_OP_AND: return 1;
857 case CC_OP_XOR: return 1;
858 case CC_OP_MULU: return 1;
859 case CC_OP_MULS: return 1;
867 static void gen_tst_cc (DisasContext *dc, TCGv cc, int cond)
869 int arith_opt, move_opt;
871 /* TODO: optimize more condition codes. */
874 * If the flags are live, we have to look at the bits in CCS.
875 * Otherwise, if we just did an arithmetic operation, we try to
876 * evaluate the condition code faster.
878 * When this function is done, cc should be non-zero if the condition
881 arith_opt = arith_cc(dc) && !dc->flags_uptodate;
882 move_opt = (dc->cc_op == CC_OP_MOVE);
885 if (arith_opt || move_opt) {
886 /* If cc_result is zero, cc should be
887 non-zero; otherwise cc should be zero. */
889 l1 = gen_new_label();
890 tcg_gen_movi_tl(cc, 0);
891 tcg_gen_brcondi_tl(TCG_COND_NE, cc_result,
893 tcg_gen_movi_tl(cc, 1);
897 cris_evaluate_flags(dc);
899 cpu_PR[PR_CCS], Z_FLAG);
903 if (arith_opt || move_opt)
904 tcg_gen_mov_tl(cc, cc_result);
906 cris_evaluate_flags(dc);
907 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
909 tcg_gen_andi_tl(cc, cc, Z_FLAG);
913 cris_evaluate_flags(dc);
914 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], C_FLAG);
917 cris_evaluate_flags(dc);
918 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS], C_FLAG);
919 tcg_gen_andi_tl(cc, cc, C_FLAG);
922 cris_evaluate_flags(dc);
923 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], V_FLAG);
926 cris_evaluate_flags(dc);
927 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
929 tcg_gen_andi_tl(cc, cc, V_FLAG);
932 if (arith_opt || move_opt) {
935 if (dc->cc_size == 1)
937 else if (dc->cc_size == 2)
940 tcg_gen_shri_tl(cc, cc_result, bits);
941 tcg_gen_xori_tl(cc, cc, 1);
943 cris_evaluate_flags(dc);
944 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
946 tcg_gen_andi_tl(cc, cc, N_FLAG);
950 if (arith_opt || move_opt) {
953 if (dc->cc_size == 1)
955 else if (dc->cc_size == 2)
958 tcg_gen_shri_tl(cc, cc_result, bits);
959 tcg_gen_andi_tl(cc, cc, 1);
962 cris_evaluate_flags(dc);
963 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS],
968 cris_evaluate_flags(dc);
969 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS],
973 cris_evaluate_flags(dc);
977 tmp = tcg_temp_new();
978 tcg_gen_xori_tl(tmp, cpu_PR[PR_CCS],
980 /* Overlay the C flag on top of the Z. */
981 tcg_gen_shli_tl(cc, tmp, 2);
982 tcg_gen_and_tl(cc, tmp, cc);
983 tcg_gen_andi_tl(cc, cc, Z_FLAG);
989 cris_evaluate_flags(dc);
990 /* Overlay the V flag on top of the N. */
991 tcg_gen_shli_tl(cc, cpu_PR[PR_CCS], 2);
994 tcg_gen_andi_tl(cc, cc, N_FLAG);
995 tcg_gen_xori_tl(cc, cc, N_FLAG);
998 cris_evaluate_flags(dc);
999 /* Overlay the V flag on top of the N. */
1000 tcg_gen_shli_tl(cc, cpu_PR[PR_CCS], 2);
1002 cpu_PR[PR_CCS], cc);
1003 tcg_gen_andi_tl(cc, cc, N_FLAG);
1006 cris_evaluate_flags(dc);
1013 /* To avoid a shift we overlay everything on
1015 tcg_gen_shri_tl(n, cpu_PR[PR_CCS], 2);
1016 tcg_gen_shri_tl(z, cpu_PR[PR_CCS], 1);
1018 tcg_gen_xori_tl(z, z, 2);
1020 tcg_gen_xor_tl(n, n, cpu_PR[PR_CCS]);
1021 tcg_gen_xori_tl(n, n, 2);
1022 tcg_gen_and_tl(cc, z, n);
1023 tcg_gen_andi_tl(cc, cc, 2);
1030 cris_evaluate_flags(dc);
1037 /* To avoid a shift we overlay everything on
1039 tcg_gen_shri_tl(n, cpu_PR[PR_CCS], 2);
1040 tcg_gen_shri_tl(z, cpu_PR[PR_CCS], 1);
1042 tcg_gen_xor_tl(n, n, cpu_PR[PR_CCS]);
1043 tcg_gen_or_tl(cc, z, n);
1044 tcg_gen_andi_tl(cc, cc, 2);
1051 cris_evaluate_flags(dc);
1052 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], P_FLAG);
1055 tcg_gen_movi_tl(cc, 1);
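/* For reference (derived from the cases above, not authoritative), the CRIS
   conditions map onto $ccs roughly as:
       cc/cs -> !C / C          ne/eq -> !Z / Z          vc/vs -> !V / V
       pl/mi -> !N / N          hi/ls -> !(C|Z) / (C|Z)
       ge/lt -> !(N^V) / (N^V)  gt/le -> !(Z|(N^V)) / (Z|(N^V))
       a     -> always true     p     -> P flag
   */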
1063 static void cris_store_direct_jmp(DisasContext *dc)
1065 /* Store the direct jmp state into the cpu-state. */
1066 if (dc->jmp == JMP_DIRECT) {
1067 tcg_gen_movi_tl(env_btarget, dc->jmp_pc);
1068 tcg_gen_movi_tl(env_btaken, 1);
1072 static void cris_prepare_cc_branch (DisasContext *dc,
1073 int offset, int cond)
1075 /* This helps us reschedule the micro-code for insns in delay slots
1076 before the actual jump. */
1077 dc->delayed_branch = 2;
1078 dc->jmp_pc = dc->pc + offset;
1082 dc->jmp = JMP_INDIRECT;
1083 gen_tst_cc (dc, env_btaken, cond);
1084 tcg_gen_movi_tl(env_btarget, dc->jmp_pc);
1086 /* Allow chaining. */
1087 dc->jmp = JMP_DIRECT;
1092 /* jumps, when the dest is in a live reg for example. Direct should be set
1093 when the dest addr is constant to allow tb chaining. */
1094 static inline void cris_prepare_jmp (DisasContext *dc, unsigned int type)
1096 /* This helps us reschedule the micro-code for insns in delay slots
1097 before the actual jump. */
1098 dc->delayed_branch = 2;
1100 if (type == JMP_INDIRECT)
1101 tcg_gen_movi_tl(env_btaken, 1);
1104 static void gen_load64(DisasContext *dc, TCGv_i64 dst, TCGv addr)
1106 int mem_index = cpu_mmu_index(dc->env);
1108 /* If we get a fault on a delayslot we must keep the jmp state in
1109 the cpu-state to be able to re-execute the jmp. */
1110 if (dc->delayed_branch == 1)
1111 cris_store_direct_jmp(dc);
1113 tcg_gen_qemu_ld64(dst, addr, mem_index);
1116 static void gen_load(DisasContext *dc, TCGv dst, TCGv addr,
1117 unsigned int size, int sign)
1119 int mem_index = cpu_mmu_index(dc->env);
1121 /* If we get a fault on a delayslot we must keep the jmp state in
1122 the cpu-state to be able to re-execute the jmp. */
1123 if (dc->delayed_branch == 1)
1124 cris_store_direct_jmp(dc);
1128 tcg_gen_qemu_ld8s(dst, addr, mem_index);
1130 tcg_gen_qemu_ld8u(dst, addr, mem_index);
1132 else if (size == 2) {
1134 tcg_gen_qemu_ld16s(dst, addr, mem_index);
1136 tcg_gen_qemu_ld16u(dst, addr, mem_index);
1138 else if (size == 4) {
1139 tcg_gen_qemu_ld32u(dst, addr, mem_index);
1146 static void gen_store (DisasContext *dc, TCGv addr, TCGv val,
1149 int mem_index = cpu_mmu_index(dc->env);
1151 /* If we get a fault on a delayslot we must keep the jmp state in
1152 the cpu-state to be able to re-execute the jmp. */
1153 if (dc->delayed_branch == 1)
1154 cris_store_direct_jmp(dc);
1157 /* Conditional writes. We only support the kind where X and P are known
1158 at translation time. */
1159 if (dc->flagx_known && dc->flags_x && (dc->tb_flags & P_FLAG)) {
1161 cris_evaluate_flags(dc);
1162 tcg_gen_ori_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], C_FLAG);
1167 tcg_gen_qemu_st8(val, addr, mem_index);
1169 tcg_gen_qemu_st16(val, addr, mem_index);
1171 tcg_gen_qemu_st32(val, addr, mem_index);
1173 if (dc->flagx_known && dc->flags_x) {
1174 cris_evaluate_flags(dc);
1175 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~C_FLAG);
1179 static inline void t_gen_sext(TCGv d, TCGv s, int size)
1182 tcg_gen_ext8s_i32(d, s);
1184 tcg_gen_ext16s_i32(d, s);
1185 else if(!TCGV_EQUAL(d, s))
1186 tcg_gen_mov_tl(d, s);
1189 static inline void t_gen_zext(TCGv d, TCGv s, int size)
1192 tcg_gen_ext8u_i32(d, s);
1194 tcg_gen_ext16u_i32(d, s);
1195 else if (!TCGV_EQUAL(d, s))
1196 tcg_gen_mov_tl(d, s);
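/* These are the TCG counterparts of C casts (sketch): t_gen_sext is
   d = (int32_t)(int8_t)s or (int32_t)(int16_t)s, t_gen_zext is
   d = (uint8_t)s or (uint16_t)s, and size 4 is a plain move.  */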
1200 static char memsize_char(int size)
1204 case 1: return 'b'; break;
1205 case 2: return 'w'; break;
1206 case 4: return 'd'; break;
1214 static inline unsigned int memsize_z(DisasContext *dc)
1216 return dc->zsize + 1;
1219 static inline unsigned int memsize_zz(DisasContext *dc)
1230 static inline void do_postinc (DisasContext *dc, int size)
1233 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], size);
1236 static inline void dec_prep_move_r(DisasContext *dc, int rs, int rd,
1237 int size, int s_ext, TCGv dst)
1240 t_gen_sext(dst, cpu_R[rs], size);
1242 t_gen_zext(dst, cpu_R[rs], size);
1245 /* Prepare dst and src for a register ALU operation.
1246 s_ext decides whether operand1 should be sign-extended or zero-extended when
1248 static void dec_prep_alu_r(DisasContext *dc, int rs, int rd,
1249 int size, int s_ext, TCGv dst, TCGv src)
1251 dec_prep_move_r(dc, rs, rd, size, s_ext, src);
1254 t_gen_sext(dst, cpu_R[rd], size);
1256 t_gen_zext(dst, cpu_R[rd], size);
1259 static int dec_prep_move_m(DisasContext *dc, int s_ext, int memsize,
1262 unsigned int rs, rd;
1269 is_imm = rs == 15 && dc->postinc;
1271 /* Load [$rs] into dst. */
1273 insn_len = 2 + memsize;
1280 imm = ldsb_code(dc->pc + 2);
1282 imm = ldsw_code(dc->pc + 2);
1285 imm = ldub_code(dc->pc + 2);
1287 imm = lduw_code(dc->pc + 2);
1290 imm = ldl_code(dc->pc + 2);
1292 tcg_gen_movi_tl(dst, imm);
1295 cris_flush_cc_state(dc);
1296 gen_load(dc, dst, cpu_R[rs], memsize, 0);
1298 t_gen_sext(dst, dst, memsize);
1300 t_gen_zext(dst, dst, memsize);
1305 /* Prepare dst and src for a memory + ALU operation.
1306 s_ext decides whether operand1 should be sign-extended or zero-extended when
1308 static int dec_prep_alu_m(DisasContext *dc, int s_ext, int memsize,
1313 insn_len = dec_prep_move_m(dc, s_ext, memsize, src);
1314 tcg_gen_mov_tl(dst, cpu_R[dc->op2]);
1319 static const char *cc_name(int cc)
1321 static const char *cc_names[16] = {
1322 "cc", "cs", "ne", "eq", "vc", "vs", "pl", "mi",
1323 "ls", "hi", "ge", "lt", "gt", "le", "a", "p"
1326 return cc_names[cc];
1330 /* Start of insn decoders. */
1332 static unsigned int dec_bccq(DisasContext *dc)
1336 uint32_t cond = dc->op2;
1339 offset = EXTRACT_FIELD (dc->ir, 1, 7);
1340 sign = EXTRACT_FIELD(dc->ir, 0, 0);
1343 offset |= sign << 8;
1345 offset = sign_extend(offset, 8);
1347 LOG_DIS("b%s %x\n", cc_name(cond), dc->pc + offset);
1349 /* op2 holds the condition-code. */
1350 cris_cc_mask(dc, 0);
1351 cris_prepare_cc_branch (dc, offset, cond);
1354 static unsigned int dec_addoq(DisasContext *dc)
1358 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 7);
1359 imm = sign_extend(dc->op1, 7);
1361 LOG_DIS("addoq %d, $r%u\n", imm, dc->op2);
1362 cris_cc_mask(dc, 0);
1363 /* Fetch register operand. */
1364 tcg_gen_addi_tl(cpu_R[R_ACR], cpu_R[dc->op2], imm);
1368 static unsigned int dec_addq(DisasContext *dc)
1370 LOG_DIS("addq %u, $r%u\n", dc->op1, dc->op2);
1372 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1374 cris_cc_mask(dc, CC_MASK_NZVC);
1376 cris_alu(dc, CC_OP_ADD,
1377 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
1380 static unsigned int dec_moveq(DisasContext *dc)
1384 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1385 imm = sign_extend(dc->op1, 5);
1386 LOG_DIS("moveq %d, $r%u\n", imm, dc->op2);
1388 tcg_gen_movi_tl(cpu_R[dc->op2], imm);
1391 static unsigned int dec_subq(DisasContext *dc)
1393 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1395 LOG_DIS("subq %u, $r%u\n", dc->op1, dc->op2);
1397 cris_cc_mask(dc, CC_MASK_NZVC);
1398 cris_alu(dc, CC_OP_SUB,
1399 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
1402 static unsigned int dec_cmpq(DisasContext *dc)
1405 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1406 imm = sign_extend(dc->op1, 5);
1408 LOG_DIS("cmpq %d, $r%d\n", imm, dc->op2);
1409 cris_cc_mask(dc, CC_MASK_NZVC);
1411 cris_alu(dc, CC_OP_CMP,
1412 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
1415 static unsigned int dec_andq(DisasContext *dc)
1418 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1419 imm = sign_extend(dc->op1, 5);
1421 LOG_DIS("andq %d, $r%d\n", imm, dc->op2);
1422 cris_cc_mask(dc, CC_MASK_NZ);
1424 cris_alu(dc, CC_OP_AND,
1425 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
1428 static unsigned int dec_orq(DisasContext *dc)
1431 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1432 imm = sign_extend(dc->op1, 5);
1433 LOG_DIS("orq %d, $r%d\n", imm, dc->op2);
1434 cris_cc_mask(dc, CC_MASK_NZ);
1436 cris_alu(dc, CC_OP_OR,
1437 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
1440 static unsigned int dec_btstq(DisasContext *dc)
1442 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1443 LOG_DIS("btstq %u, $r%d\n", dc->op1, dc->op2);
1445 cris_cc_mask(dc, CC_MASK_NZ);
1446 cris_evaluate_flags(dc);
1447 gen_helper_btst(cpu_PR[PR_CCS], cpu_R[dc->op2],
1448 tcg_const_tl(dc->op1), cpu_PR[PR_CCS]);
1449 cris_alu(dc, CC_OP_MOVE,
1450 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op2], 4);
1451 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
1452 dc->flags_uptodate = 1;
1455 static unsigned int dec_asrq(DisasContext *dc)
1457 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1458 LOG_DIS("asrq %u, $r%d\n", dc->op1, dc->op2);
1459 cris_cc_mask(dc, CC_MASK_NZ);
1461 tcg_gen_sari_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
1462 cris_alu(dc, CC_OP_MOVE,
1464 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1467 static unsigned int dec_lslq(DisasContext *dc)
1469 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1470 LOG_DIS("lslq %u, $r%d\n", dc->op1, dc->op2);
1472 cris_cc_mask(dc, CC_MASK_NZ);
1474 tcg_gen_shli_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
1476 cris_alu(dc, CC_OP_MOVE,
1478 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1481 static unsigned int dec_lsrq(DisasContext *dc)
1483 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1484 LOG_DIS("lsrq %u, $r%d\n", dc->op1, dc->op2);
1486 cris_cc_mask(dc, CC_MASK_NZ);
1488 tcg_gen_shri_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
1489 cris_alu(dc, CC_OP_MOVE,
1491 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1495 static unsigned int dec_move_r(DisasContext *dc)
1497 int size = memsize_zz(dc);
1499 LOG_DIS("move.%c $r%u, $r%u\n",
1500 memsize_char(size), dc->op1, dc->op2);
1502 cris_cc_mask(dc, CC_MASK_NZ);
1504 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, cpu_R[dc->op2]);
1505 cris_cc_mask(dc, CC_MASK_NZ);
1506 cris_update_cc_op(dc, CC_OP_MOVE, 4);
1507 cris_update_cc_x(dc);
1508 cris_update_result(dc, cpu_R[dc->op2]);
1513 t0 = tcg_temp_new();
1514 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
1515 cris_alu(dc, CC_OP_MOVE,
1517 cpu_R[dc->op2], t0, size);
1523 static unsigned int dec_scc_r(DisasContext *dc)
1527 LOG_DIS("s%s $r%u\n",
1528 cc_name(cond), dc->op1);
1534 gen_tst_cc (dc, cpu_R[dc->op1], cond);
1535 l1 = gen_new_label();
1536 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_R[dc->op1], 0, l1);
1537 tcg_gen_movi_tl(cpu_R[dc->op1], 1);
1541 tcg_gen_movi_tl(cpu_R[dc->op1], 1);
1543 cris_cc_mask(dc, 0);
1547 static inline void cris_alu_alloc_temps(DisasContext *dc, int size, TCGv *t)
1550 t[0] = cpu_R[dc->op2];
1551 t[1] = cpu_R[dc->op1];
1553 t[0] = tcg_temp_new();
1554 t[1] = tcg_temp_new();
1558 static inline void cris_alu_free_temps(DisasContext *dc, int size, TCGv *t)
1561 tcg_temp_free(t[0]);
1562 tcg_temp_free(t[1]);
1566 static unsigned int dec_and_r(DisasContext *dc)
1569 int size = memsize_zz(dc);
1571 LOG_DIS("and.%c $r%u, $r%u\n",
1572 memsize_char(size), dc->op1, dc->op2);
1574 cris_cc_mask(dc, CC_MASK_NZ);
1576 cris_alu_alloc_temps(dc, size, t);
1577 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1578 cris_alu(dc, CC_OP_AND, cpu_R[dc->op2], t[0], t[1], size);
1579 cris_alu_free_temps(dc, size, t);
1583 static unsigned int dec_lz_r(DisasContext *dc)
1586 LOG_DIS("lz $r%u, $r%u\n",
1588 cris_cc_mask(dc, CC_MASK_NZ);
1589 t0 = tcg_temp_new();
1590 dec_prep_alu_r(dc, dc->op1, dc->op2, 4, 0, cpu_R[dc->op2], t0);
1591 cris_alu(dc, CC_OP_LZ, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1596 static unsigned int dec_lsl_r(DisasContext *dc)
1599 int size = memsize_zz(dc);
1601 LOG_DIS("lsl.%c $r%u, $r%u\n",
1602 memsize_char(size), dc->op1, dc->op2);
1604 cris_cc_mask(dc, CC_MASK_NZ);
1605 cris_alu_alloc_temps(dc, size, t);
1606 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1607 tcg_gen_andi_tl(t[1], t[1], 63);
1608 cris_alu(dc, CC_OP_LSL, cpu_R[dc->op2], t[0], t[1], size);
1609 cris_alu_free_temps(dc, size, t);
1613 static unsigned int dec_lsr_r(DisasContext *dc)
1616 int size = memsize_zz(dc);
1618 LOG_DIS("lsr.%c $r%u, $r%u\n",
1619 memsize_char(size), dc->op1, dc->op2);
1621 cris_cc_mask(dc, CC_MASK_NZ);
1622 cris_alu_alloc_temps(dc, size, t);
1623 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1624 tcg_gen_andi_tl(t[1], t[1], 63);
1625 cris_alu(dc, CC_OP_LSR, cpu_R[dc->op2], t[0], t[1], size);
1626 cris_alu_free_temps(dc, size, t);
1630 static unsigned int dec_asr_r(DisasContext *dc)
1633 int size = memsize_zz(dc);
1635 LOG_DIS("asr.%c $r%u, $r%u\n",
1636 memsize_char(size), dc->op1, dc->op2);
1638 cris_cc_mask(dc, CC_MASK_NZ);
1639 cris_alu_alloc_temps(dc, size, t);
1640 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 1, t[0], t[1]);
1641 tcg_gen_andi_tl(t[1], t[1], 63);
1642 cris_alu(dc, CC_OP_ASR, cpu_R[dc->op2], t[0], t[1], size);
1643 cris_alu_free_temps(dc, size, t);
1647 static unsigned int dec_muls_r(DisasContext *dc)
1650 int size = memsize_zz(dc);
1652 LOG_DIS("muls.%c $r%u, $r%u\n",
1653 memsize_char(size), dc->op1, dc->op2);
1654 cris_cc_mask(dc, CC_MASK_NZV);
1655 cris_alu_alloc_temps(dc, size, t);
1656 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 1, t[0], t[1]);
1658 cris_alu(dc, CC_OP_MULS, cpu_R[dc->op2], t[0], t[1], 4);
1659 cris_alu_free_temps(dc, size, t);
1663 static unsigned int dec_mulu_r(DisasContext *dc)
1666 int size = memsize_zz(dc);
1668 LOG_DIS("mulu.%c $r%u, $r%u\n",
1669 memsize_char(size), dc->op1, dc->op2);
1670 cris_cc_mask(dc, CC_MASK_NZV);
1671 cris_alu_alloc_temps(dc, size, t);
1672 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1674 cris_alu(dc, CC_OP_MULU, cpu_R[dc->op2], t[0], t[1], 4);
1675 cris_alu_free_temps(dc, size, t);
1680 static unsigned int dec_dstep_r(DisasContext *dc)
1682 LOG_DIS("dstep $r%u, $r%u\n", dc->op1, dc->op2);
1683 cris_cc_mask(dc, CC_MASK_NZ);
1684 cris_alu(dc, CC_OP_DSTEP,
1685 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op1], 4);
1689 static unsigned int dec_xor_r(DisasContext *dc)
1692 int size = memsize_zz(dc);
1693 LOG_DIS("xor.%c $r%u, $r%u\n",
1694 memsize_char(size), dc->op1, dc->op2);
1695 BUG_ON(size != 4); /* xor is dword. */
1696 cris_cc_mask(dc, CC_MASK_NZ);
1697 cris_alu_alloc_temps(dc, size, t);
1698 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1700 cris_alu(dc, CC_OP_XOR, cpu_R[dc->op2], t[0], t[1], 4);
1701 cris_alu_free_temps(dc, size, t);
1705 static unsigned int dec_bound_r(DisasContext *dc)
1708 int size = memsize_zz(dc);
1709 LOG_DIS("bound.%c $r%u, $r%u\n",
1710 memsize_char(size), dc->op1, dc->op2);
1711 cris_cc_mask(dc, CC_MASK_NZ);
1712 l0 = tcg_temp_local_new();
1713 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, l0);
1714 cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], cpu_R[dc->op2], l0, 4);
1719 static unsigned int dec_cmp_r(DisasContext *dc)
1722 int size = memsize_zz(dc);
1723 LOG_DIS("cmp.%c $r%u, $r%u\n",
1724 memsize_char(size), dc->op1, dc->op2);
1725 cris_cc_mask(dc, CC_MASK_NZVC);
1726 cris_alu_alloc_temps(dc, size, t);
1727 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1729 cris_alu(dc, CC_OP_CMP, cpu_R[dc->op2], t[0], t[1], size);
1730 cris_alu_free_temps(dc, size, t);
1734 static unsigned int dec_abs_r(DisasContext *dc)
1738 LOG_DIS("abs $r%u, $r%u\n",
1740 cris_cc_mask(dc, CC_MASK_NZ);
1742 t0 = tcg_temp_new();
1743 tcg_gen_sari_tl(t0, cpu_R[dc->op1], 31);
1744 tcg_gen_xor_tl(cpu_R[dc->op2], cpu_R[dc->op1], t0);
1745 tcg_gen_sub_tl(cpu_R[dc->op2], cpu_R[dc->op2], t0);
1748 cris_alu(dc, CC_OP_MOVE,
1749 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op2], 4);
1753 static unsigned int dec_add_r(DisasContext *dc)
1756 int size = memsize_zz(dc);
1757 LOG_DIS("add.%c $r%u, $r%u\n",
1758 memsize_char(size), dc->op1, dc->op2);
1759 cris_cc_mask(dc, CC_MASK_NZVC);
1760 cris_alu_alloc_temps(dc, size, t);
1761 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1763 cris_alu(dc, CC_OP_ADD, cpu_R[dc->op2], t[0], t[1], size);
1764 cris_alu_free_temps(dc, size, t);
1768 static unsigned int dec_addc_r(DisasContext *dc)
1770 LOG_DIS("addc $r%u, $r%u\n",
1772 cris_evaluate_flags(dc);
1773 /* Set for this insn. */
1774 dc->flagx_known = 1;
1775 dc->flags_x = X_FLAG;
1777 cris_cc_mask(dc, CC_MASK_NZVC);
1778 cris_alu(dc, CC_OP_ADDC,
1779 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op1], 4);
1783 static unsigned int dec_mcp_r(DisasContext *dc)
1785 LOG_DIS("mcp $p%u, $r%u\n",
1787 cris_evaluate_flags(dc);
1788 cris_cc_mask(dc, CC_MASK_RNZV);
1789 cris_alu(dc, CC_OP_MCP,
1790 cpu_R[dc->op1], cpu_R[dc->op1], cpu_PR[dc->op2], 4);
1795 static char *swapmode_name(int mode, char *modename) {
1798 modename[i++] = 'n';
1800 modename[i++] = 'w';
1802 modename[i++] = 'b';
1804 modename[i++] = 'r';
1810 static unsigned int dec_swap_r(DisasContext *dc)
1816 LOG_DIS("swap%s $r%u\n",
1817 swapmode_name(dc->op2, modename), dc->op1);
1819 cris_cc_mask(dc, CC_MASK_NZ);
1820 t0 = tcg_temp_new();
1821 t_gen_mov_TN_reg(t0, dc->op1);
1823 tcg_gen_not_tl(t0, t0);
1825 t_gen_swapw(t0, t0);
1827 t_gen_swapb(t0, t0);
1829 t_gen_swapr(t0, t0);
1830 cris_alu(dc, CC_OP_MOVE,
1831 cpu_R[dc->op1], cpu_R[dc->op1], t0, 4);
1836 static unsigned int dec_or_r(DisasContext *dc)
1839 int size = memsize_zz(dc);
1840 LOG_DIS("or.%c $r%u, $r%u\n",
1841 memsize_char(size), dc->op1, dc->op2);
1842 cris_cc_mask(dc, CC_MASK_NZ);
1843 cris_alu_alloc_temps(dc, size, t);
1844 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1845 cris_alu(dc, CC_OP_OR, cpu_R[dc->op2], t[0], t[1], size);
1846 cris_alu_free_temps(dc, size, t);
1850 static unsigned int dec_addi_r(DisasContext *dc)
1853 LOG_DIS("addi.%c $r%u, $r%u\n",
1854 memsize_char(memsize_zz(dc)), dc->op2, dc->op1);
1855 cris_cc_mask(dc, 0);
1856 t0 = tcg_temp_new();
1857 tcg_gen_shl_tl(t0, cpu_R[dc->op2], tcg_const_tl(dc->zzsize));
1858 tcg_gen_add_tl(cpu_R[dc->op1], cpu_R[dc->op1], t0);
1863 static unsigned int dec_addi_acr(DisasContext *dc)
1866 LOG_DIS("addi.%c $r%u, $r%u, $acr\n",
1867 memsize_char(memsize_zz(dc)), dc->op2, dc->op1);
1868 cris_cc_mask(dc, 0);
1869 t0 = tcg_temp_new();
1870 tcg_gen_shl_tl(t0, cpu_R[dc->op2], tcg_const_tl(dc->zzsize));
1871 tcg_gen_add_tl(cpu_R[R_ACR], cpu_R[dc->op1], t0);
1876 static unsigned int dec_neg_r(DisasContext *dc)
1879 int size = memsize_zz(dc);
1880 LOG_DIS("neg.%c $r%u, $r%u\n",
1881 memsize_char(size), dc->op1, dc->op2);
1882 cris_cc_mask(dc, CC_MASK_NZVC);
1883 cris_alu_alloc_temps(dc, size, t);
1884 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1886 cris_alu(dc, CC_OP_NEG, cpu_R[dc->op2], t[0], t[1], size);
1887 cris_alu_free_temps(dc, size, t);
1891 static unsigned int dec_btst_r(DisasContext *dc)
1893 LOG_DIS("btst $r%u, $r%u\n",
1895 cris_cc_mask(dc, CC_MASK_NZ);
1896 cris_evaluate_flags(dc);
1897 gen_helper_btst(cpu_PR[PR_CCS], cpu_R[dc->op2],
1898 cpu_R[dc->op1], cpu_PR[PR_CCS]);
1899 cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2],
1900 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1901 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
1902 dc->flags_uptodate = 1;
1906 static unsigned int dec_sub_r(DisasContext *dc)
1909 int size = memsize_zz(dc);
1910 LOG_DIS("sub.%c $r%u, $r%u\n",
1911 memsize_char(size), dc->op1, dc->op2);
1912 cris_cc_mask(dc, CC_MASK_NZVC);
1913 cris_alu_alloc_temps(dc, size, t);
1914 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1915 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], t[0], t[1], size);
1916 cris_alu_free_temps(dc, size, t);
1920 /* Zero extension. From size to dword. */
1921 static unsigned int dec_movu_r(DisasContext *dc)
1924 int size = memsize_z(dc);
1925 LOG_DIS("movu.%c $r%u, $r%u\n",
1929 cris_cc_mask(dc, CC_MASK_NZ);
1930 t0 = tcg_temp_new();
1931 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
1932 cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1937 /* Sign extension. From size to dword. */
1938 static unsigned int dec_movs_r(DisasContext *dc)
1941 int size = memsize_z(dc);
1942 LOG_DIS("movs.%c $r%u, $r%u\n",
1946 cris_cc_mask(dc, CC_MASK_NZ);
1947 t0 = tcg_temp_new();
1948 /* Size can only be qi or hi. */
1949 t_gen_sext(t0, cpu_R[dc->op1], size);
1950 cris_alu(dc, CC_OP_MOVE,
1951 cpu_R[dc->op2], cpu_R[dc->op1], t0, 4);
1956 /* zero extension. From size to dword. */
1957 static unsigned int dec_addu_r(DisasContext *dc)
1960 int size = memsize_z(dc);
1961 LOG_DIS("addu.%c $r%u, $r%u\n",
1965 cris_cc_mask(dc, CC_MASK_NZVC);
1966 t0 = tcg_temp_new();
1967 /* Size can only be qi or hi. */
1968 t_gen_zext(t0, cpu_R[dc->op1], size);
1969 cris_alu(dc, CC_OP_ADD,
1970 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1975 /* Sign extension. From size to dword. */
1976 static unsigned int dec_adds_r(DisasContext *dc)
1979 int size = memsize_z(dc);
1980 LOG_DIS("adds.%c $r%u, $r%u\n",
1984 cris_cc_mask(dc, CC_MASK_NZVC);
1985 t0 = tcg_temp_new();
1986 /* Size can only be qi or hi. */
1987 t_gen_sext(t0, cpu_R[dc->op1], size);
1988 cris_alu(dc, CC_OP_ADD,
1989 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1994 /* Zero extension. From size to dword. */
1995 static unsigned int dec_subu_r(DisasContext *dc)
1998 int size = memsize_z(dc);
1999 LOG_DIS("subu.%c $r%u, $r%u\n",
2003 cris_cc_mask(dc, CC_MASK_NZVC);
2004 t0 = tcg_temp_new();
2005 /* Size can only be qi or hi. */
2006 t_gen_zext(t0, cpu_R[dc->op1], size);
2007 cris_alu(dc, CC_OP_SUB,
2008 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
2013 /* Sign extension. From size to dword. */
2014 static unsigned int dec_subs_r(DisasContext *dc)
2017 int size = memsize_z(dc);
2018 LOG_DIS("subs.%c $r%u, $r%u\n",
2022 cris_cc_mask(dc, CC_MASK_NZVC);
2023 t0 = tcg_temp_new();
2024 /* Size can only be qi or hi. */
2025 t_gen_sext(t0, cpu_R[dc->op1], size);
2026 cris_alu(dc, CC_OP_SUB,
2027 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
2032 static unsigned int dec_setclrf(DisasContext *dc)
2035 int set = (~dc->opcode >> 2) & 1;
2038 flags = (EXTRACT_FIELD(dc->ir, 12, 15) << 4)
2039 | EXTRACT_FIELD(dc->ir, 0, 3);
2040 if (set && flags == 0) {
2043 } else if (!set && (flags & 0x20)) {
2048 set ? "set" : "clr",
2052 /* User space is not allowed to touch these. Silently ignore. */
2053 if (dc->tb_flags & U_FLAG) {
2054 flags &= ~(S_FLAG | I_FLAG | U_FLAG);
2057 if (flags & X_FLAG) {
2058 dc->flagx_known = 1;
2060 dc->flags_x = X_FLAG;
2065 /* Break the TB if the P flag changes. */
2066 if (flags & P_FLAG) {
2067 if ((set && !(dc->tb_flags & P_FLAG))
2068 || (!set && (dc->tb_flags & P_FLAG))) {
2069 tcg_gen_movi_tl(env_pc, dc->pc + 2);
2070 dc->is_jmp = DISAS_UPDATE;
2071 dc->cpustate_changed = 1;
2074 if (flags & S_FLAG) {
2075 dc->cpustate_changed = 1;
2079 /* Simply decode the flags. */
2080 cris_evaluate_flags (dc);
2081 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
2082 cris_update_cc_x(dc);
2083 tcg_gen_movi_tl(cc_op, dc->cc_op);
2086 if (!(dc->tb_flags & U_FLAG) && (flags & U_FLAG)) {
2087 /* Enter user mode. */
2088 t_gen_mov_env_TN(ksp, cpu_R[R_SP]);
2089 tcg_gen_mov_tl(cpu_R[R_SP], cpu_PR[PR_USP]);
2090 dc->cpustate_changed = 1;
2092 tcg_gen_ori_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], flags);
2095 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~flags);
2097 dc->flags_uptodate = 1;
2102 static unsigned int dec_move_rs(DisasContext *dc)
2104 LOG_DIS("move $r%u, $s%u\n", dc->op1, dc->op2);
2105 cris_cc_mask(dc, 0);
2106 gen_helper_movl_sreg_reg(tcg_const_tl(dc->op2), tcg_const_tl(dc->op1));
2109 static unsigned int dec_move_sr(DisasContext *dc)
2111 LOG_DIS("move $s%u, $r%u\n", dc->op2, dc->op1);
2112 cris_cc_mask(dc, 0);
2113 gen_helper_movl_reg_sreg(tcg_const_tl(dc->op1), tcg_const_tl(dc->op2));
2117 static unsigned int dec_move_rp(DisasContext *dc)
2120 LOG_DIS("move $r%u, $p%u\n", dc->op1, dc->op2);
2121 cris_cc_mask(dc, 0);
2123 t[0] = tcg_temp_new();
2124 if (dc->op2 == PR_CCS) {
2125 cris_evaluate_flags(dc);
2126 t_gen_mov_TN_reg(t[0], dc->op1);
2127 if (dc->tb_flags & U_FLAG) {
2128 t[1] = tcg_temp_new();
2129 /* User space is not allowed to touch all flags. */
2130 tcg_gen_andi_tl(t[0], t[0], 0x39f);
2131 tcg_gen_andi_tl(t[1], cpu_PR[PR_CCS], ~0x39f);
2132 tcg_gen_or_tl(t[0], t[1], t[0]);
2133 tcg_temp_free(t[1]);
2137 t_gen_mov_TN_reg(t[0], dc->op1);
2139 t_gen_mov_preg_TN(dc, dc->op2, t[0]);
2140 if (dc->op2 == PR_CCS) {
2141 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
2142 dc->flags_uptodate = 1;
2144 tcg_temp_free(t[0]);
2147 static unsigned int dec_move_pr(DisasContext *dc)
2150 LOG_DIS("move $p%u, $r%u\n", dc->op1, dc->op2);
2151 cris_cc_mask(dc, 0);
2153 if (dc->op2 == PR_CCS)
2154 cris_evaluate_flags(dc);
2156 t0 = tcg_temp_new();
2157 t_gen_mov_TN_preg(t0, dc->op2);
2158 cris_alu(dc, CC_OP_MOVE,
2159 cpu_R[dc->op1], cpu_R[dc->op1], t0, preg_sizes[dc->op2]);
2164 static unsigned int dec_move_mr(DisasContext *dc)
2166 int memsize = memsize_zz(dc);
2168 LOG_DIS("move.%c [$r%u%s, $r%u\n",
2169 memsize_char(memsize),
2170 dc->op1, dc->postinc ? "+]" : "]",
2174 insn_len = dec_prep_move_m(dc, 0, 4, cpu_R[dc->op2]);
2175 cris_cc_mask(dc, CC_MASK_NZ);
2176 cris_update_cc_op(dc, CC_OP_MOVE, 4);
2177 cris_update_cc_x(dc);
2178 cris_update_result(dc, cpu_R[dc->op2]);
2183 t0 = tcg_temp_new();
2184 insn_len = dec_prep_move_m(dc, 0, memsize, t0);
2185 cris_cc_mask(dc, CC_MASK_NZ);
2186 cris_alu(dc, CC_OP_MOVE,
2187 cpu_R[dc->op2], cpu_R[dc->op2], t0, memsize);
2190 do_postinc(dc, memsize);
2194 static inline void cris_alu_m_alloc_temps(TCGv *t)
2196 t[0] = tcg_temp_new();
2197 t[1] = tcg_temp_new();
2200 static inline void cris_alu_m_free_temps(TCGv *t)
2202 tcg_temp_free(t[0]);
2203 tcg_temp_free(t[1]);
2206 static unsigned int dec_movs_m(DisasContext *dc)
2209 int memsize = memsize_z(dc);
2211 LOG_DIS("movs.%c [$r%u%s, $r%u\n",
2212 memsize_char(memsize),
2213 dc->op1, dc->postinc ? "+]" : "]",
2216 cris_alu_m_alloc_temps(t);
2218 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2219 cris_cc_mask(dc, CC_MASK_NZ);
2220 cris_alu(dc, CC_OP_MOVE,
2221 cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2222 do_postinc(dc, memsize);
2223 cris_alu_m_free_temps(t);
2227 static unsigned int dec_addu_m(DisasContext *dc)
2230 int memsize = memsize_z(dc);
2232 LOG_DIS("addu.%c [$r%u%s, $r%u\n",
2233 memsize_char(memsize),
2234 dc->op1, dc->postinc ? "+]" : "]",
2237 cris_alu_m_alloc_temps(t);
2239 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2240 cris_cc_mask(dc, CC_MASK_NZVC);
2241 cris_alu(dc, CC_OP_ADD,
2242 cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2243 do_postinc(dc, memsize);
2244 cris_alu_m_free_temps(t);
2248 static unsigned int dec_adds_m(DisasContext *dc)
2251 int memsize = memsize_z(dc);
2253 LOG_DIS("adds.%c [$r%u%s, $r%u\n",
2254 memsize_char(memsize),
2255 dc->op1, dc->postinc ? "+]" : "]",
2258 cris_alu_m_alloc_temps(t);
2260 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2261 cris_cc_mask(dc, CC_MASK_NZVC);
2262 cris_alu(dc, CC_OP_ADD, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2263 do_postinc(dc, memsize);
2264 cris_alu_m_free_temps(t);
2268 static unsigned int dec_subu_m(DisasContext *dc)
2271 int memsize = memsize_z(dc);
2273 LOG_DIS("subu.%c [$r%u%s, $r%u\n",
2274 memsize_char(memsize),
2275 dc->op1, dc->postinc ? "+]" : "]",
2278 cris_alu_m_alloc_temps(t);
2280 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2281 cris_cc_mask(dc, CC_MASK_NZVC);
2282 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2283 do_postinc(dc, memsize);
2284 cris_alu_m_free_temps(t);
2288 static unsigned int dec_subs_m(DisasContext *dc)
2291 int memsize = memsize_z(dc);
2293 LOG_DIS("subs.%c [$r%u%s, $r%u\n",
2294 memsize_char(memsize),
2295 dc->op1, dc->postinc ? "+]" : "]",
2298 cris_alu_m_alloc_temps(t);
2300 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2301 cris_cc_mask(dc, CC_MASK_NZVC);
2302 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2303 do_postinc(dc, memsize);
2304 cris_alu_m_free_temps(t);
2308 static unsigned int dec_movu_m(DisasContext *dc)
2311 int memsize = memsize_z(dc);
2314 LOG_DIS("movu.%c [$r%u%s, $r%u\n",
2315 memsize_char(memsize),
2316 dc->op1, dc->postinc ? "+]" : "]",
2319 cris_alu_m_alloc_temps(t);
2320 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2321 cris_cc_mask(dc, CC_MASK_NZ);
2322 cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2323 do_postinc(dc, memsize);
2324 cris_alu_m_free_temps(t);
2328 static unsigned int dec_cmpu_m(DisasContext *dc)
2331 int memsize = memsize_z(dc);
2333 LOG_DIS("cmpu.%c [$r%u%s, $r%u\n",
2334 memsize_char(memsize),
2335 dc->op1, dc->postinc ? "+]" : "]",
2338 cris_alu_m_alloc_temps(t);
2339 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2340 cris_cc_mask(dc, CC_MASK_NZVC);
2341 cris_alu(dc, CC_OP_CMP, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2342 do_postinc(dc, memsize);
2343 cris_alu_m_free_temps(t);
2347 static unsigned int dec_cmps_m(DisasContext *dc)
2350 int memsize = memsize_z(dc);
2352 LOG_DIS("cmps.%c [$r%u%s, $r%u\n",
2353 memsize_char(memsize),
2354 dc->op1, dc->postinc ? "+]" : "]",
2357 cris_alu_m_alloc_temps(t);
2358 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2359 cris_cc_mask(dc, CC_MASK_NZVC);
2360 cris_alu(dc, CC_OP_CMP,
2361 cpu_R[dc->op2], cpu_R[dc->op2], t[1],
2363 do_postinc(dc, memsize);
2364 cris_alu_m_free_temps(t);
2368 static unsigned int dec_cmp_m(DisasContext *dc)
2371 int memsize = memsize_zz(dc);
2373 LOG_DIS("cmp.%c [$r%u%s, $r%u\n",
2374 memsize_char(memsize),
2375 dc->op1, dc->postinc ? "+]" : "]",
2378 cris_alu_m_alloc_temps(t);
2379 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2380 cris_cc_mask(dc, CC_MASK_NZVC);
2381 cris_alu(dc, CC_OP_CMP,
2382 cpu_R[dc->op2], cpu_R[dc->op2], t[1],
2384 do_postinc(dc, memsize);
2385 cris_alu_m_free_temps(t);
2389 static unsigned int dec_test_m(DisasContext *dc)
2392 int memsize = memsize_zz(dc);
2394 LOG_DIS("test.%c [$r%u%s] op2=%x\n",
2395 memsize_char(memsize),
2396 dc->op1, dc->postinc ? "+]" : "]",
2399 cris_evaluate_flags(dc);
2401 cris_alu_m_alloc_temps(t);
2402 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2403 cris_cc_mask(dc, CC_MASK_NZ);
2404 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~3);
2406 cris_alu(dc, CC_OP_CMP,
2407 cpu_R[dc->op2], t[1], tcg_const_tl(0), memsize_zz(dc));
2408 do_postinc(dc, memsize);
2409 cris_alu_m_free_temps(t);
2413 static unsigned int dec_and_m(DisasContext *dc)
2416 int memsize = memsize_zz(dc);
2418 LOG_DIS("and.%c [$r%u%s, $r%u\n",
2419 memsize_char(memsize),
2420 dc->op1, dc->postinc ? "+]" : "]",
2423 cris_alu_m_alloc_temps(t);
2424 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2425 cris_cc_mask(dc, CC_MASK_NZ);
2426 cris_alu(dc, CC_OP_AND, cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2427 do_postinc(dc, memsize);
2428 cris_alu_m_free_temps(t);
2432 static unsigned int dec_add_m(DisasContext *dc)
2435 int memsize = memsize_zz(dc);
2437 LOG_DIS("add.%c [$r%u%s, $r%u\n",
2438 memsize_char(memsize),
2439 dc->op1, dc->postinc ? "+]" : "]",
2442 cris_alu_m_alloc_temps(t);
2443 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2444 cris_cc_mask(dc, CC_MASK_NZVC);
2445 cris_alu(dc, CC_OP_ADD,
2446 cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2447 do_postinc(dc, memsize);
2448 cris_alu_m_free_temps(t);
2452 static unsigned int dec_addo_m(DisasContext *dc)
2455 int memsize = memsize_zz(dc);
2457 LOG_DIS("add.%c [$r%u%s, $r%u\n",
2458 memsize_char(memsize),
2459 dc->op1, dc->postinc ? "+]" : "]",
2462 cris_alu_m_alloc_temps(t);
2463 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2464 cris_cc_mask(dc, 0);
2465 cris_alu(dc, CC_OP_ADD, cpu_R[R_ACR], t[0], t[1], 4);
2466 do_postinc(dc, memsize);
2467 cris_alu_m_free_temps(t);
2471 static unsigned int dec_bound_m(DisasContext *dc)
2474 int memsize = memsize_zz(dc);
2476 LOG_DIS("bound.%c [$r%u%s, $r%u\n",
2477 memsize_char(memsize),
2478 dc->op1, dc->postinc ? "+]" : "]",
2481 l[0] = tcg_temp_local_new();
2482 l[1] = tcg_temp_local_new();
2483 insn_len = dec_prep_alu_m(dc, 0, memsize, l[0], l[1]);
2484 cris_cc_mask(dc, CC_MASK_NZ);
2485 cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], l[0], l[1], 4);
2486 do_postinc(dc, memsize);
2487 tcg_temp_free(l[0]);
2488 tcg_temp_free(l[1]);
2492 static unsigned int dec_addc_mr(DisasContext *dc)
2496 LOG_DIS("addc [$r%u%s, $r%u\n",
2497 dc->op1, dc->postinc ? "+]" : "]",
2500 cris_evaluate_flags(dc);
2502 /* Set for this insn. */
2503 dc->flagx_known = 1;
2504 dc->flags_x = X_FLAG;
2506 cris_alu_m_alloc_temps(t);
2507 insn_len = dec_prep_alu_m(dc, 0, 4, t[0], t[1]);
2508 cris_cc_mask(dc, CC_MASK_NZVC);
2509 cris_alu(dc, CC_OP_ADDC, cpu_R[dc->op2], t[0], t[1], 4);
2511 cris_alu_m_free_temps(t);
2515 static unsigned int dec_sub_m(DisasContext *dc)
2518 int memsize = memsize_zz(dc);
2520 LOG_DIS("sub.%c [$r%u%s, $r%u ir=%x zz=%x\n",
2521 memsize_char(memsize),
2522 dc->op1, dc->postinc ? "+]" : "]",
2523 dc->op2, dc->ir, dc->zzsize);
2525 cris_alu_m_alloc_temps(t);
2526 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2527 cris_cc_mask(dc, CC_MASK_NZVC);
2528 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], t[0], t[1], memsize);
2529 do_postinc(dc, memsize);
2530 cris_alu_m_free_temps(t);
2534 static unsigned int dec_or_m(DisasContext *dc)
2537 int memsize = memsize_zz(dc);
2539 LOG_DIS("or.%c [$r%u%s, $r%u pc=%x\n",
2540 memsize_char(memsize),
2541 dc->op1, dc->postinc ? "+]" : "]",
2544 cris_alu_m_alloc_temps(t);
2545 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2546 cris_cc_mask(dc, CC_MASK_NZ);
2547 cris_alu(dc, CC_OP_OR,
2548 cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2549 do_postinc(dc, memsize);
2550 cris_alu_m_free_temps(t);
2554 static unsigned int dec_move_mp(DisasContext *dc)
2557 int memsize = memsize_zz(dc);
2560 LOG_DIS("move.%c [$r%u%s, $p%u\n",
2561 memsize_char(memsize),
2563 dc->postinc ? "+]" : "]",
2566 cris_alu_m_alloc_temps(t);
2567 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2568 cris_cc_mask(dc, 0);
2569 if (dc->op2 == PR_CCS) {
2570 cris_evaluate_flags(dc);
2571 if (dc->tb_flags & U_FLAG) {
2572 /* User space is not allowed to touch all flags. */
2573 tcg_gen_andi_tl(t[1], t[1], 0x39f);
2574 tcg_gen_andi_tl(t[0], cpu_PR[PR_CCS], ~0x39f);
2575 tcg_gen_or_tl(t[1], t[0], t[1]);
2579 t_gen_mov_preg_TN(dc, dc->op2, t[1]);
2581 do_postinc(dc, memsize);
2582 cris_alu_m_free_temps(t);
2586 static unsigned int dec_move_pm(DisasContext *dc)
2591 memsize = preg_sizes[dc->op2];
2593 LOG_DIS("move.%c $p%u, [$r%u%s\n",
2594 memsize_char(memsize),
2595 dc->op2, dc->op1, dc->postinc ? "+]" : "]");
2597 /* Prepare the store. Address in cpu_R[op1], value in t0. */
2598 if (dc->op2 == PR_CCS)
2599 cris_evaluate_flags(dc);
2600 t0 = tcg_temp_new();
2601 t_gen_mov_TN_preg(t0, dc->op2);
2602 cris_flush_cc_state(dc);
2603 gen_store(dc, cpu_R[dc->op1], t0, memsize);
2606 cris_cc_mask(dc, 0);
2608 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], memsize);
2612 static unsigned int dec_movem_mr(DisasContext *dc)
2618 int nr = dc->op2 + 1;
2620 LOG_DIS("movem [$r%u%s, $r%u\n", dc->op1,
2621 dc->postinc ? "+]" : "]", dc->op2);
2623 addr = tcg_temp_new();
2624 /* There are probably better ways of doing this. */
2625 cris_flush_cc_state(dc);
2626 for (i = 0; i < (nr >> 1); i++) {
2627 tmp[i] = tcg_temp_new_i64();
2628 tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
2629 gen_load64(dc, tmp[i], addr);
2632 tmp32 = tcg_temp_new_i32();
2633 tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
2634 gen_load(dc, tmp32, addr, 4, 0);
2637 tcg_temp_free(addr);
2639 for (i = 0; i < (nr >> 1); i++) {
2640 tcg_gen_trunc_i64_i32(cpu_R[i * 2], tmp[i]);
2641 tcg_gen_shri_i64(tmp[i], tmp[i], 32);
2642 tcg_gen_trunc_i64_i32(cpu_R[i * 2 + 1], tmp[i]);
2643 tcg_temp_free_i64(tmp[i]);
2646 tcg_gen_mov_tl(cpu_R[dc->op2], tmp32);
2647 tcg_temp_free(tmp32);
2650 /* Write back the updated pointer value. */
2652 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], nr * 4);
2654 /* gen_load might want to evaluate the previous insn's flags. */
2655 cris_cc_mask(dc, 0);
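/* Sketch of what the movem load above implements (as visible in the code,
   not an authoritative statement of the CRIS semantics): registers
   $r0 .. $r<op2> are loaded from consecutive words at [$rs], two at a time
   via 64-bit loads plus a trailing 32-bit load when the count is odd:

       for (i = 0; i <= op2; i++)
           r[i] = mem[rs + i * 4];
       if (postinc)
           rs += (op2 + 1) * 4;   // the nr * 4 write back above
   */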
2659 static unsigned int dec_movem_rm(DisasContext *dc)
2665 LOG_DIS("movem $r%u, [$r%u%s\n", dc->op2, dc->op1,
2666 dc->postinc ? "+]" : "]");
2668 cris_flush_cc_state(dc);
2670 tmp = tcg_temp_new();
2671 addr = tcg_temp_new();
2672 tcg_gen_movi_tl(tmp, 4);
2673 tcg_gen_mov_tl(addr, cpu_R[dc->op1]);
2674 for (i = 0; i <= dc->op2; i++) {
2675 /* Displace addr. */
2676 /* Perform the store. */
2677 gen_store(dc, addr, cpu_R[i], 4);
2678 tcg_gen_add_tl(addr, addr, tmp);
2681 tcg_gen_mov_tl(cpu_R[dc->op1], addr);
2682 cris_cc_mask(dc, 0);
2684 tcg_temp_free(addr);
2688 static unsigned int dec_move_rm(DisasContext *dc)
2692 memsize = memsize_zz(dc);
2694 LOG_DIS("move.%c $r%u, [$r%u]\n",
2695 memsize_char(memsize), dc->op2, dc->op1);
2697 /* prepare store. */
2698 cris_flush_cc_state(dc);
2699 gen_store(dc, cpu_R[dc->op1], cpu_R[dc->op2], memsize);
2702 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], memsize);
2703 cris_cc_mask(dc, 0);
2707 static unsigned int dec_lapcq(DisasContext *dc)
2709 LOG_DIS("lapcq %x, $r%u\n",
2710 dc->pc + dc->op1*2, dc->op2);
2711 cris_cc_mask(dc, 0);
2712 tcg_gen_movi_tl(cpu_R[dc->op2], dc->pc + dc->op1 * 2);
2716 static unsigned int dec_lapc_im(DisasContext *dc)
2724 cris_cc_mask(dc, 0);
2725 imm = ldl_code(dc->pc + 2);
2726 LOG_DIS("lapc 0x%x, $r%u\n", imm + dc->pc, dc->op2);
2730 t_gen_mov_reg_TN(rd, tcg_const_tl(pc));
2734 /* Jump to special reg. */
2735 static unsigned int dec_jump_p(DisasContext *dc)
2737 LOG_DIS("jump $p%u\n", dc->op2);
2739 if (dc->op2 == PR_CCS)
2740 cris_evaluate_flags(dc);
2741 t_gen_mov_TN_preg(env_btarget, dc->op2);
2742 /* rete will often have low bit set to indicate delayslot. */
2743 tcg_gen_andi_tl(env_btarget, env_btarget, ~1);
2744 cris_cc_mask(dc, 0);
2745 cris_prepare_jmp(dc, JMP_INDIRECT);
2749 /* Jump and save. */
2750 static unsigned int dec_jas_r(DisasContext *dc)
2752 LOG_DIS("jas $r%u, $p%u\n", dc->op1, dc->op2);
2753 cris_cc_mask(dc, 0);
2754 /* Store the return address in Pd. */
2755 tcg_gen_mov_tl(env_btarget, cpu_R[dc->op1]);
2758 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 4));
2760 cris_prepare_jmp(dc, JMP_INDIRECT);
2764 static unsigned int dec_jas_im(DisasContext *dc)
2768 imm = ldl_code(dc->pc + 2);
2770 LOG_DIS("jas 0x%x\n", imm);
2771 cris_cc_mask(dc, 0);
2772 /* Store the return address in Pd. */
2773 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8));
2776 cris_prepare_jmp(dc, JMP_DIRECT);
2780 static unsigned int dec_jasc_im(DisasContext *dc)
2784 imm = ldl_code(dc->pc + 2);
2786 LOG_DIS("jasc 0x%x\n", imm);
2787 cris_cc_mask(dc, 0);
2788 /* Store the return address in Pd. */
2789 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8 + 4));
2792 cris_prepare_jmp(dc, JMP_DIRECT);
2796 static unsigned int dec_jasc_r(DisasContext *dc)
2798 LOG_DIS("jasc_r $r%u, $p%u\n", dc->op1, dc->op2);
2799 cris_cc_mask(dc, 0);
2800 /* Store the return address in Pd. */
2801 tcg_gen_mov_tl(env_btarget, cpu_R[dc->op1]);
2802 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 4 + 4));
2803 cris_prepare_jmp(dc, JMP_INDIRECT);
2807 static unsigned int dec_bcc_im(DisasContext *dc)
2810 uint32_t cond = dc->op2;
2812 offset = ldsw_code(dc->pc + 2);
2814 LOG_DIS("b%s %d pc=%x dst=%x\n",
2815 cc_name(cond), offset,
2816 dc->pc, dc->pc + offset);
2818 cris_cc_mask(dc, 0);
2819 /* op2 holds the condition-code. */
2820 cris_prepare_cc_branch (dc, offset, cond);
static unsigned int dec_bas_im(DisasContext *dc)
{
    int32_t simm;

    simm = ldl_code(dc->pc + 2);

    LOG_DIS("bas 0x%x, $p%u\n", dc->pc + simm, dc->op2);
    cris_cc_mask(dc, 0);
    /* Store the return address in Pd.  */
    t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8));

    dc->jmp_pc = dc->pc + simm;
    cris_prepare_jmp(dc, JMP_DIRECT);
    return 6;
}
static unsigned int dec_basc_im(DisasContext *dc)
{
    int32_t simm;

    simm = ldl_code(dc->pc + 2);

    LOG_DIS("basc 0x%x, $p%u\n", dc->pc + simm, dc->op2);
    cris_cc_mask(dc, 0);
    /* Store the return address in Pd.  */
    t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 12));

    dc->jmp_pc = dc->pc + simm;
    cris_prepare_jmp(dc, JMP_DIRECT);
    return 6;
}
static unsigned int dec_rfe_etc(DisasContext *dc)
{
    cris_cc_mask(dc, 0);

    if (dc->op2 == 15) {
        /* halt.  */
        t_gen_mov_env_TN(halted, tcg_const_tl(1));
        tcg_gen_movi_tl(env_pc, dc->pc + 2);
        t_gen_raise_exception(EXCP_HLT);
        return 2;
    }

    switch (dc->op2 & 7) {
    case 2:
        /* rfe.  */
        cris_evaluate_flags(dc);
        gen_helper_rfe();
        dc->is_jmp = DISAS_UPDATE;
        break;
    case 5:
        /* rfn.  */
        cris_evaluate_flags(dc);
        gen_helper_rfn();
        dc->is_jmp = DISAS_UPDATE;
        break;
    case 6:
        /* break.  */
        LOG_DIS("break %d\n", dc->op1);
        cris_evaluate_flags(dc);
        tcg_gen_movi_tl(env_pc, dc->pc + 2);
        /* Breaks start at 16 in the exception vector.  */
        t_gen_mov_env_TN(trap_vector, tcg_const_tl(dc->op1 + 16));
        t_gen_raise_exception(EXCP_BREAK);
        dc->is_jmp = DISAS_UPDATE;
        break;
    default:
        printf("op2=%x\n", dc->op2);
        BUG();
        break;
    }
    return 2;
}
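/*
 * Worked example for the break handling above (illustrative, not from the
 * original source): "break 2" decodes with dc->op1 == 2, so trap_vector
 * becomes 16 + 2 = 18 and the guest enters the exception vector at entry 18
 * via EXCP_BREAK.
 */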
static unsigned int dec_ftag_fidx_d_m(DisasContext *dc)
{
    return 2;
}

static unsigned int dec_ftag_fidx_i_m(DisasContext *dc)
{
    return 2;
}

static unsigned int dec_null(DisasContext *dc)
{
    printf("unknown insn pc=%x opc=%x op1=%x op2=%x\n",
           dc->pc, dc->opcode, dc->op1, dc->op2);
    BUG();
    return 2;
}
static struct decoder_info {
    struct {
        uint32_t bits;
        uint32_t mask;
    };
    unsigned int (*dec)(DisasContext *dc);
} decinfo[] = {
    /* Order matters here; the first entry whose mask/bits match wins.  */
    {DEC_MOVEQ, dec_moveq},
    {DEC_BTSTQ, dec_btstq},
    {DEC_CMPQ, dec_cmpq},
    {DEC_ADDOQ, dec_addoq},
    {DEC_ADDQ, dec_addq},
    {DEC_SUBQ, dec_subq},
    {DEC_ANDQ, dec_andq},
    {DEC_ORQ, dec_orq},
    {DEC_ASRQ, dec_asrq},
    {DEC_LSLQ, dec_lslq},
    {DEC_LSRQ, dec_lsrq},
    {DEC_BCCQ, dec_bccq},

    {DEC_BCC_IM, dec_bcc_im},
    {DEC_JAS_IM, dec_jas_im},
    {DEC_JAS_R, dec_jas_r},
    {DEC_JASC_IM, dec_jasc_im},
    {DEC_JASC_R, dec_jasc_r},
    {DEC_BAS_IM, dec_bas_im},
    {DEC_BASC_IM, dec_basc_im},
    {DEC_JUMP_P, dec_jump_p},
    {DEC_LAPC_IM, dec_lapc_im},
    {DEC_LAPCQ, dec_lapcq},

    {DEC_RFE_ETC, dec_rfe_etc},
    {DEC_ADDC_MR, dec_addc_mr},

    {DEC_MOVE_MP, dec_move_mp},
    {DEC_MOVE_PM, dec_move_pm},
    {DEC_MOVEM_MR, dec_movem_mr},
    {DEC_MOVEM_RM, dec_movem_rm},
    {DEC_MOVE_PR, dec_move_pr},
    {DEC_SCC_R, dec_scc_r},
    {DEC_SETF, dec_setclrf},
    {DEC_CLEARF, dec_setclrf},

    {DEC_MOVE_SR, dec_move_sr},
    {DEC_MOVE_RP, dec_move_rp},
    {DEC_SWAP_R, dec_swap_r},
    {DEC_ABS_R, dec_abs_r},
    {DEC_LZ_R, dec_lz_r},
    {DEC_MOVE_RS, dec_move_rs},
    {DEC_BTST_R, dec_btst_r},
    {DEC_ADDC_R, dec_addc_r},

    {DEC_DSTEP_R, dec_dstep_r},
    {DEC_XOR_R, dec_xor_r},
    {DEC_MCP_R, dec_mcp_r},
    {DEC_CMP_R, dec_cmp_r},

    {DEC_ADDI_R, dec_addi_r},
    {DEC_ADDI_ACR, dec_addi_acr},

    {DEC_ADD_R, dec_add_r},
    {DEC_SUB_R, dec_sub_r},

    {DEC_ADDU_R, dec_addu_r},
    {DEC_ADDS_R, dec_adds_r},
    {DEC_SUBU_R, dec_subu_r},
    {DEC_SUBS_R, dec_subs_r},
    {DEC_LSL_R, dec_lsl_r},

    {DEC_AND_R, dec_and_r},
    {DEC_OR_R, dec_or_r},
    {DEC_BOUND_R, dec_bound_r},
    {DEC_ASR_R, dec_asr_r},
    {DEC_LSR_R, dec_lsr_r},

    {DEC_MOVU_R, dec_movu_r},
    {DEC_MOVS_R, dec_movs_r},
    {DEC_NEG_R, dec_neg_r},
    {DEC_MOVE_R, dec_move_r},

    {DEC_FTAG_FIDX_I_M, dec_ftag_fidx_i_m},
    {DEC_FTAG_FIDX_D_M, dec_ftag_fidx_d_m},

    {DEC_MULS_R, dec_muls_r},
    {DEC_MULU_R, dec_mulu_r},

    {DEC_ADDU_M, dec_addu_m},
    {DEC_ADDS_M, dec_adds_m},
    {DEC_SUBU_M, dec_subu_m},
    {DEC_SUBS_M, dec_subs_m},

    {DEC_CMPU_M, dec_cmpu_m},
    {DEC_CMPS_M, dec_cmps_m},
    {DEC_MOVU_M, dec_movu_m},
    {DEC_MOVS_M, dec_movs_m},

    {DEC_CMP_M, dec_cmp_m},
    {DEC_ADDO_M, dec_addo_m},
    {DEC_BOUND_M, dec_bound_m},
    {DEC_ADD_M, dec_add_m},
    {DEC_SUB_M, dec_sub_m},
    {DEC_AND_M, dec_and_m},
    {DEC_OR_M, dec_or_m},
    {DEC_MOVE_RM, dec_move_rm},
    {DEC_TEST_M, dec_test_m},
    {DEC_MOVE_MR, dec_move_mr},

    {0, 0, dec_null}
};
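/*
 * Note on the table above: entries are matched in order by
 * (opcode & mask) == bits, so more specific patterns must precede the ones
 * that overlap them.  The final {0, 0, dec_null} entry has an all-zero mask
 * and therefore matches any opcode, acting as the "unknown insn" catch-all.
 */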
static inline unsigned int
cris_decoder(DisasContext *dc)
{
    unsigned int insn_len = 2;
    int i;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);

    /* Load a halfword into the instruction register.  */
    dc->ir = lduw_code(dc->pc);

    /* Now decode it.  */
    dc->opcode = EXTRACT_FIELD(dc->ir, 4, 11);
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 3);
    dc->op2 = EXTRACT_FIELD(dc->ir, 12, 15);
    dc->zsize = EXTRACT_FIELD(dc->ir, 4, 4);
    dc->zzsize = EXTRACT_FIELD(dc->ir, 4, 5);
    dc->postinc = EXTRACT_FIELD(dc->ir, 10, 10);
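    /*
     * Field layout of the 16-bit instruction word, as extracted above
     * (bit 15 down to bit 0):
     *
     *   [15:12] op2    [11:4] opcode    [3:0] op1
     *
     * zsize [4], zzsize [5:4] and postinc [10] overlap the opcode field and
     * are only meaningful for the insn formats that actually use them.
     */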
    /* Scan the decoder table; the first matching entry decodes the insn.  */
    for (i = 0; i < ARRAY_SIZE(decinfo); i++) {
        if ((dc->opcode & decinfo[i].mask) == decinfo[i].bits) {
            insn_len = decinfo[i].dec(dc);
            break;
        }
    }
#if !defined(CONFIG_USER_ONLY)
    /* Single-stepping?  */
    if (dc->tb_flags & S_FLAG) {
        int l1;

        l1 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_PR[PR_SPC], dc->pc, l1);
        /* We treat SPC as a break with an odd trap vector.  */
        cris_evaluate_flags(dc);
        t_gen_mov_env_TN(trap_vector, tcg_const_tl(3));
        tcg_gen_movi_tl(env_pc, dc->pc + insn_len);
        tcg_gen_movi_tl(cpu_PR[PR_SPC], dc->pc + insn_len);
        t_gen_raise_exception(EXCP_BREAK);
        gen_set_label(l1);
    }
#endif
    return insn_len;
}
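/*
 * Note: the S_FLAG/SPC check above implements the guest-visible hardware
 * single-step (a break through the odd trap vector 3 when $spc matches the
 * insn about to execute), while env->singlestep_enabled in the translation
 * loop below implements host-side (debugger) single-stepping via EXCP_DEBUG.
 * The two mechanisms are independent of each other.
 */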
static void check_breakpoint(CPUState *env, DisasContext *dc)
{
    CPUBreakpoint *bp;

    if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
        TAILQ_FOREACH(bp, &env->breakpoints, entry) {
            if (bp->pc == dc->pc) {
                cris_evaluate_flags(dc);
                tcg_gen_movi_tl(env_pc, dc->pc);
                t_gen_raise_exception(EXCP_DEBUG);
                dc->is_jmp = DISAS_UPDATE;
            }
        }
    }
}
/*
 * Delay slots on QEMU/CRIS.
 *
 * If an exception hits on a delayslot, the core will let ERP (the Exception
 * Return Pointer) point to the branch (the previous) insn and set the lsb
 * to give SW a hint that the exception actually hit on the dslot.
 *
 * CRIS expects all PC addresses to be 16-bit aligned. The lsb is ignored by
 * the core and any jmp to an odd address will mask off that lsb. It is
 * simply there to let SW know there was an exception on a dslot.
 *
 * When the software returns from an exception, the branch will re-execute.
 * On QEMU care needs to be taken when a branch+delayslot sequence is broken
 * and the branch and delayslot don't share pages.
 *
 * The TB containing the branch insn will set up env->btarget and evaluate
 * env->btaken. When the translation loop exits we will note that the branch
 * sequence is broken and let env->dslot be the size of the branch insn (those
 * vary in length).
 *
 * The TB containing the delayslot will have the PC of its real insn (i.e. no
 * lsb set). It will also expect env->dslot to be set up with the size of the
 * branch insn, so that env->pc - env->dslot points to the branch insn. This
 * TB will execute the dslot and take the branch, either to btarget or just
 * one insn ahead.
 *
 * When exceptions occur, we check for env->dslot in do_interrupt to detect
 * broken branch sequences and set up $erp accordingly (i.e. let it point to
 * the branch and set the lsb). Then env->dslot gets cleared so that the
 * exception handler can enter. When returning from exceptions (jump $erp)
 * the lsb gets masked off and we will re-execute the branch insn.
 */
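/*
 * Illustrative example (hypothetical addresses, not from the original
 * source): assume a 4-byte conditional branch at 0x1ffc whose 2-byte delay
 * slot insn starts at 0x2000, i.e. on the next page.  The TB ending with the
 * branch evaluates env->btaken, sets env->btarget and leaves env->dslot = 4
 * (the branch insn size).  The next TB starts at 0x2000 with an even PC,
 * executes the dslot insn and then either jumps to btarget or falls through.
 * If an exception hits in that TB, do_interrupt sees env->dslot != 0, sets
 * $erp = 0x2000 - 4 with the lsb set (0x1ffd) and clears env->dslot; a later
 * "jump $erp" masks the lsb off and re-executes the branch at 0x1ffc.
 */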
/* generate intermediate code for basic block 'tb'.  */
static void
gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
                               int search_pc)
{
    uint16_t *gen_opc_end;
    uint32_t pc_start, next_page_start;
    unsigned int insn_len;
    int j, lj, num_insns, max_insns;
    struct DisasContext ctx;
    struct DisasContext *dc = &ctx;
    target_ulong npc;

    qemu_log_try_set_file(stderr);
    /* An odd PC indicates that the branch is re-executing due to an
     * exception in the delayslot, like in real hw.
     */
    pc_start = tb->pc & ~1;
    dc->env = env;
    dc->tb = tb;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    dc->ppc = pc_start;
    dc->pc = pc_start;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->flags_uptodate = 1;
    dc->flagx_known = 1;
    dc->flags_x = tb->flags & X_FLAG;
    dc->cc_x_uptodate = 0;

    cris_update_cc_op(dc, CC_OP_FLAGS, 4);
    dc->cc_size_uptodate = -1;

    /* Decode TB flags.  */
    dc->tb_flags = tb->flags & (S_FLAG | P_FLAG | U_FLAG | X_FLAG);
    dc->delayed_branch = !!(tb->flags & 7);
    if (dc->delayed_branch)
        dc->jmp = JMP_INDIRECT;
    else
        dc->jmp = JMP_NOJMP;

    dc->cpustate_changed = 0;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log(
            "srch=%d pc=%x %x flg=%llx bt=%x ds=%u ccs=%x\n"
            "pid=%x usp=%x\n"
            "%x.%x.%x.%x\n"
            "%x.%x.%x.%x\n"
            "%x.%x.%x.%x\n"
            "%x.%x.%x.%x\n",
            search_pc, dc->pc, dc->ppc,
            (unsigned long long)tb->flags,
            env->btarget, (unsigned)tb->flags & 7,
            env->pregs[PR_CCS],
            env->pregs[PR_PID], env->pregs[PR_USP],
            env->regs[0], env->regs[1], env->regs[2], env->regs[3],
            env->regs[4], env->regs[5], env->regs[6], env->regs[7],
            env->regs[8], env->regs[9],
            env->regs[10], env->regs[11],
            env->regs[12], env->regs[13],
            env->regs[14], env->regs[15]);
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
    }
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    do {
        check_breakpoint(env, dc);

        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            if (dc->delayed_branch == 1)
                gen_opc_pc[lj] = dc->ppc | 1;
            else
                gen_opc_pc[lj] = dc->pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        LOG_DIS("%8.8x:\t", dc->pc);

        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        dc->clear_x = 1;

        insn_len = cris_decoder(dc);
        dc->ppc = dc->pc;
        dc->pc += insn_len;
        if (dc->clear_x)
            cris_clear_x_flag(dc);

        num_insns++;
        /* Check for delayed branches here. If we do it before
           actually generating any host code, the simulator will just
           loop doing nothing on this program location.  */
        if (dc->delayed_branch) {
            dc->delayed_branch--;
            if (dc->delayed_branch == 0) {
                /* The delayslot is done; clear env->dslot and emit the
                   actual branch.  */
                t_gen_mov_env_TN(dslot, tcg_const_tl(0));
                if (dc->jmp == JMP_DIRECT) {
                    dc->is_jmp = DISAS_NEXT;
                } else {
                    t_gen_cc_jmp(env_btarget, tcg_const_tl(dc->pc));
                    dc->is_jmp = DISAS_JUMP;
                }
            }
        }

        /* If we are re-executing a branch due to exceptions on
           delay slots, don't break.  */
        if (!(tb->pc & 1) && env->singlestep_enabled)
            break;
    } while (!dc->is_jmp && !dc->cpustate_changed
             && gen_opc_ptr < gen_opc_end
             && (dc->pc < next_page_start)
             && num_insns < max_insns);

    npc = dc->pc;
    if (dc->jmp == JMP_DIRECT && !dc->delayed_branch)
        npc = dc->jmp_pc;

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Force an update if the per-tb cpu state has changed.  */
    if (dc->is_jmp == DISAS_NEXT
        && (dc->cpustate_changed || !dc->flagx_known
        || (dc->flags_x != (tb->flags & X_FLAG)))) {
        dc->is_jmp = DISAS_UPDATE;
        tcg_gen_movi_tl(env_pc, npc);
    }
    /* Broken branch+delayslot sequence.  */
    if (dc->delayed_branch == 1) {
        /* Set env->dslot to the size of the branch insn.  */
        t_gen_mov_env_TN(dslot, tcg_const_tl(dc->pc - dc->ppc));
        cris_store_direct_jmp(dc);
    }

    cris_evaluate_flags(dc);

    if (unlikely(env->singlestep_enabled)) {
        if (dc->is_jmp == DISAS_NEXT)
            tcg_gen_movi_tl(env_pc, npc);
        t_gen_raise_exception(EXCP_DEBUG);
    } else {
        switch (dc->is_jmp) {
        case DISAS_NEXT:
            gen_goto_tb(dc, 1, npc);
            break;
        default:
        case DISAS_JUMP:
        case DISAS_UPDATE:
            /* Indicate that the hash table must be used
               to find the next TB.  */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* Nothing more to generate.  */
            break;
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = dc->pc - pc_start;
        tb->icount = num_insns;
    }

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        log_target_disas(pc_start, dc->pc - pc_start, 0);
        qemu_log("\nisize=%d osize=%zd\n",
                 dc->pc - pc_start, gen_opc_ptr - gen_opc_buf);
    }
}
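/*
 * Note on the two entry points below: gen_intermediate_code() translates a
 * TB for execution, while gen_intermediate_code_pc() re-translates it with
 * search_pc set so that gen_opc_pc[] and gen_opc_instr_start[] get filled
 * in; gen_pc_load() at the end of this file then reads that table to
 * restore the guest PC when execution stops in the middle of a TB.
 */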
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
void cpu_dump_state (CPUState *env, FILE *f,
                     int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                     int flags)
{
    int i;
    uint32_t srs;

    if (!env)
        return;

    cpu_fprintf(f, "PC=%x CCS=%x btaken=%d btarget=%x\n"
                "cc_op=%d cc_src=%d cc_dest=%d cc_result=%x cc_mask=%x\n",
                env->pc, env->pregs[PR_CCS], env->btaken, env->btarget,
                env->cc_op,
                env->cc_src, env->cc_dest, env->cc_result, env->cc_mask);

    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "r%2.2d=%8.8x ", i, env->regs[i]);
        if ((i + 1) % 4 == 0)
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "\nspecial regs:\n");
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "p%2.2d=%8.8x ", i, env->pregs[i]);
        if ((i + 1) % 4 == 0)
            cpu_fprintf(f, "\n");
    }
    srs = env->pregs[PR_SRS];
    cpu_fprintf(f, "\nsupport function regs bank %x:\n", srs);
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "s%2.2d=%8.8x ",
                    i, env->sregs[srs][i]);
        if ((i + 1) % 4 == 0)
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "\n\n");
}
CPUCRISState *cpu_cris_init (const char *cpu_model)
{
    CPUCRISState *env;
    static int tcg_initialized = 0;
    int i;

    env = qemu_mallocz(sizeof(CPUCRISState));

    qemu_init_vcpu(env);

    if (tcg_initialized)
        return env;

    tcg_initialized = 1;
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cc_x = tcg_global_mem_new(TCG_AREG0,
                              offsetof(CPUState, cc_x), "cc_x");
    cc_src = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, cc_src), "cc_src");
    cc_dest = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, cc_dest),
                                 "cc_dest");
    cc_result = tcg_global_mem_new(TCG_AREG0,
                                   offsetof(CPUState, cc_result),
                                   "cc_result");
    cc_op = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUState, cc_op), "cc_op");
    cc_size = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, cc_size),
                                 "cc_size");
    cc_mask = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, cc_mask),
                                 "cc_mask");

    env_pc = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, pc),
                                "pc");
    env_btarget = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, btarget),
                                     "btarget");
    env_btaken = tcg_global_mem_new(TCG_AREG0,
                                    offsetof(CPUState, btaken),
                                    "btaken");
    for (i = 0; i < 16; i++) {
        cpu_R[i] = tcg_global_mem_new(TCG_AREG0,
                                      offsetof(CPUState, regs[i]),
                                      regnames[i]);
    }
    for (i = 0; i < 16; i++) {
        cpu_PR[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, pregs[i]),
                                       pregnames[i]);
    }

#define GEN_HELPER 2
#include "helper.h"

    return env;
}
void cpu_reset (CPUCRISState *env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }

    memset(env, 0, offsetof(CPUCRISState, breakpoints));

    env->pregs[PR_VR] = 32;
#if defined(CONFIG_USER_ONLY)
    /* Start in user mode with interrupts enabled.  */
    env->pregs[PR_CCS] |= U_FLAG | I_FLAG;
#else
    env->pregs[PR_CCS] = 0;
#endif
}
void gen_pc_load(CPUState *env, struct TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}
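/*
 * Note: gen_opc_pc[] entries recorded for delay-slot insns have their lsb
 * set (see the search_pc bookkeeping in the translation loop above), so the
 * PC restored here may be odd; the translator treats an odd tb->pc
 * (pc_start = tb->pc & ~1) as the "branch re-executing after an exception
 * in the delay slot" case described in the delay-slot comment block.
 */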