2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
30 #include "qemu-common.h"
31 #include "host-utils.h"
37 #define CPU_SINGLE_STEP 0x1
38 #define CPU_BRANCH_STEP 0x2
39 #define GDBSTUB_SINGLE_STEP 0x4
41 /* Include definitions for instructions classes and implementations flags */
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
45 #ifdef PPC_DEBUG_DISAS
46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
48 # define LOG_DISAS(...) do { } while (0)
50 /*****************************************************************************/
51 /* Code translation helpers */
53 /* global register indexes */
54 static TCGv_ptr cpu_env;
55 static char cpu_reg_names[10*3 + 22*4 /* GPR */
56 #if !defined(TARGET_PPC64)
57 + 10*4 + 22*5 /* SPE GPRh */
59 + 10*4 + 22*5 /* FPR */
60 + 2*(10*6 + 22*7) /* AVRh, AVRl */
62 static TCGv cpu_gpr[32];
63 #if !defined(TARGET_PPC64)
64 static TCGv cpu_gprh[32];
66 static TCGv_i64 cpu_fpr[32];
67 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
68 static TCGv_i32 cpu_crf[8];
74 static TCGv cpu_reserve;
75 static TCGv_i32 cpu_fpscr;
76 static TCGv_i32 cpu_access_type;
78 #include "gen-icount.h"
80 void ppc_translate_init(void)
84 size_t cpu_reg_names_size;
85 static int done_init = 0;
90 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
93 cpu_reg_names_size = sizeof(cpu_reg_names);
95 for (i = 0; i < 8; i++) {
96 snprintf(p, cpu_reg_names_size, "crf%d", i);
97 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
98 offsetof(CPUState, crf[i]), p);
100 cpu_reg_names_size -= 5;
103 for (i = 0; i < 32; i++) {
104 snprintf(p, cpu_reg_names_size, "r%d", i);
105 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
106 offsetof(CPUState, gpr[i]), p);
107 p += (i < 10) ? 3 : 4;
108 cpu_reg_names_size -= (i < 10) ? 3 : 4;
109 #if !defined(TARGET_PPC64)
110 snprintf(p, cpu_reg_names_size, "r%dH", i);
111 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
112 offsetof(CPUState, gprh[i]), p);
113 p += (i < 10) ? 4 : 5;
114 cpu_reg_names_size -= (i < 10) ? 4 : 5;
117 snprintf(p, cpu_reg_names_size, "fp%d", i);
118 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
119 offsetof(CPUState, fpr[i]), p);
120 p += (i < 10) ? 4 : 5;
121 cpu_reg_names_size -= (i < 10) ? 4 : 5;
123 snprintf(p, cpu_reg_names_size, "avr%dH", i);
124 #ifdef HOST_WORDS_BIGENDIAN
125 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
126 offsetof(CPUState, avr[i].u64[0]), p);
128 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
129 offsetof(CPUState, avr[i].u64[1]), p);
131 p += (i < 10) ? 6 : 7;
132 cpu_reg_names_size -= (i < 10) ? 6 : 7;
134 snprintf(p, cpu_reg_names_size, "avr%dL", i);
135 #ifdef HOST_WORDS_BIGENDIAN
136 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
137 offsetof(CPUState, avr[i].u64[1]), p);
139 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
140 offsetof(CPUState, avr[i].u64[0]), p);
142 p += (i < 10) ? 6 : 7;
143 cpu_reg_names_size -= (i < 10) ? 6 : 7;
146 cpu_nip = tcg_global_mem_new(TCG_AREG0,
147 offsetof(CPUState, nip), "nip");
149 cpu_msr = tcg_global_mem_new(TCG_AREG0,
150 offsetof(CPUState, msr), "msr");
152 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
153 offsetof(CPUState, ctr), "ctr");
155 cpu_lr = tcg_global_mem_new(TCG_AREG0,
156 offsetof(CPUState, lr), "lr");
158 cpu_xer = tcg_global_mem_new(TCG_AREG0,
159 offsetof(CPUState, xer), "xer");
161 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
162 offsetof(CPUState, reserve_addr),
165 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
166 offsetof(CPUState, fpscr), "fpscr");
168 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
169 offsetof(CPUState, access_type), "access_type");
171 /* register helpers */
178 /* internal defines */
179 typedef struct DisasContext {
180 struct TranslationBlock *tb;
184 /* Routine used to access memory */
187 /* Translation flags */
189 #if defined(TARGET_PPC64)
195 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
196 int singlestep_enabled;
199 struct opc_handler_t {
202 /* instruction type */
204 /* extended instruction type */
207 void (*handler)(DisasContext *ctx);
208 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
211 #if defined(DO_PPC_STATISTICS)
216 static inline void gen_reset_fpstatus(void)
218 gen_helper_reset_fpstatus();
221 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
223 TCGv_i32 t0 = tcg_temp_new_i32();
226 /* This case might be optimized later */
227 tcg_gen_movi_i32(t0, 1);
228 gen_helper_compute_fprf(t0, arg, t0);
229 if (unlikely(set_rc)) {
230 tcg_gen_mov_i32(cpu_crf[1], t0);
232 gen_helper_float_check_status();
233 } else if (unlikely(set_rc)) {
234 /* We always need to compute fpcc */
235 tcg_gen_movi_i32(t0, 0);
236 gen_helper_compute_fprf(t0, arg, t0);
237 tcg_gen_mov_i32(cpu_crf[1], t0);
240 tcg_temp_free_i32(t0);
243 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
245 if (ctx->access_type != access_type) {
246 tcg_gen_movi_i32(cpu_access_type, access_type);
247 ctx->access_type = access_type;
251 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
253 #if defined(TARGET_PPC64)
255 tcg_gen_movi_tl(cpu_nip, nip);
258 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
261 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
264 if (ctx->exception == POWERPC_EXCP_NONE) {
265 gen_update_nip(ctx, ctx->nip);
267 t0 = tcg_const_i32(excp);
268 t1 = tcg_const_i32(error);
269 gen_helper_raise_exception_err(t0, t1);
270 tcg_temp_free_i32(t0);
271 tcg_temp_free_i32(t1);
272 ctx->exception = (excp);
275 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
278 if (ctx->exception == POWERPC_EXCP_NONE) {
279 gen_update_nip(ctx, ctx->nip);
281 t0 = tcg_const_i32(excp);
282 gen_helper_raise_exception(t0);
283 tcg_temp_free_i32(t0);
284 ctx->exception = (excp);
287 static inline void gen_debug_exception(DisasContext *ctx)
291 if (ctx->exception != POWERPC_EXCP_BRANCH)
292 gen_update_nip(ctx, ctx->nip);
293 t0 = tcg_const_i32(EXCP_DEBUG);
294 gen_helper_raise_exception(t0);
295 tcg_temp_free_i32(t0);
298 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
300 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
303 /* Stop translation */
304 static inline void gen_stop_exception(DisasContext *ctx)
306 gen_update_nip(ctx, ctx->nip);
307 ctx->exception = POWERPC_EXCP_STOP;
310 /* No need to update nip here, as execution flow will change */
311 static inline void gen_sync_exception(DisasContext *ctx)
313 ctx->exception = POWERPC_EXCP_SYNC;
316 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
317 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
319 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
320 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
322 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
323 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
325 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
326 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
328 typedef struct opcode_t {
329 unsigned char opc1, opc2, opc3;
330 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
331 unsigned char pad[5];
333 unsigned char pad[1];
335 opc_handler_t handler;
339 /*****************************************************************************/
340 /*** Instruction decoding ***/
341 #define EXTRACT_HELPER(name, shift, nb) \
342 static inline uint32_t name(uint32_t opcode) \
344 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
347 #define EXTRACT_SHELPER(name, shift, nb) \
348 static inline int32_t name(uint32_t opcode) \
350 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
354 EXTRACT_HELPER(opc1, 26, 6);
356 EXTRACT_HELPER(opc2, 1, 5);
358 EXTRACT_HELPER(opc3, 6, 5);
359 /* Update Cr0 flags */
360 EXTRACT_HELPER(Rc, 0, 1);
362 EXTRACT_HELPER(rD, 21, 5);
364 EXTRACT_HELPER(rS, 21, 5);
366 EXTRACT_HELPER(rA, 16, 5);
368 EXTRACT_HELPER(rB, 11, 5);
370 EXTRACT_HELPER(rC, 6, 5);
372 EXTRACT_HELPER(crfD, 23, 3);
373 EXTRACT_HELPER(crfS, 18, 3);
374 EXTRACT_HELPER(crbD, 21, 5);
375 EXTRACT_HELPER(crbA, 16, 5);
376 EXTRACT_HELPER(crbB, 11, 5);
378 EXTRACT_HELPER(_SPR, 11, 10);
379 static inline uint32_t SPR(uint32_t opcode)
381 uint32_t sprn = _SPR(opcode);
383 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
385 /*** Get constants ***/
386 EXTRACT_HELPER(IMM, 12, 8);
387 /* 16 bits signed immediate value */
388 EXTRACT_SHELPER(SIMM, 0, 16);
389 /* 16 bits unsigned immediate value */
390 EXTRACT_HELPER(UIMM, 0, 16);
391 /* 5 bits signed immediate value */
392 EXTRACT_HELPER(SIMM5, 16, 5);
393 /* 5 bits signed immediate value */
394 EXTRACT_HELPER(UIMM5, 16, 5);
396 EXTRACT_HELPER(NB, 11, 5);
398 EXTRACT_HELPER(SH, 11, 5);
399 /* Vector shift count */
400 EXTRACT_HELPER(VSH, 6, 4);
402 EXTRACT_HELPER(MB, 6, 5);
404 EXTRACT_HELPER(ME, 1, 5);
406 EXTRACT_HELPER(TO, 21, 5);
408 EXTRACT_HELPER(CRM, 12, 8);
409 EXTRACT_HELPER(FM, 17, 8);
410 EXTRACT_HELPER(SR, 16, 4);
411 EXTRACT_HELPER(FPIMM, 12, 4);
413 /*** Jump target decoding ***/
415 EXTRACT_SHELPER(d, 0, 16);
416 /* Immediate address */
417 static inline target_ulong LI(uint32_t opcode)
419 return (opcode >> 0) & 0x03FFFFFC;
422 static inline uint32_t BD(uint32_t opcode)
424 return (opcode >> 0) & 0xFFFC;
427 EXTRACT_HELPER(BO, 21, 5);
428 EXTRACT_HELPER(BI, 16, 5);
429 /* Absolute/relative address */
430 EXTRACT_HELPER(AA, 1, 1);
432 EXTRACT_HELPER(LK, 0, 1);
434 /* Create a mask between <start> and <end> bits */
435 static inline target_ulong MASK(uint32_t start, uint32_t end)
439 #if defined(TARGET_PPC64)
440 if (likely(start == 0)) {
441 ret = UINT64_MAX << (63 - end);
442 } else if (likely(end == 63)) {
443 ret = UINT64_MAX >> start;
446 if (likely(start == 0)) {
447 ret = UINT32_MAX << (31 - end);
448 } else if (likely(end == 31)) {
449 ret = UINT32_MAX >> start;
453 ret = (((target_ulong)(-1ULL)) >> (start)) ^
454 (((target_ulong)(-1ULL) >> (end)) >> 1);
455 if (unlikely(start > end))
462 /*****************************************************************************/
463 /* PowerPC instructions table */
465 #if defined(DO_PPC_STATISTICS)
466 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
476 .handler = &gen_##name, \
477 .oname = stringify(name), \
479 .oname = stringify(name), \
481 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
491 .handler = &gen_##name, \
497 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
507 .handler = &gen_##name, \
509 .oname = stringify(name), \
511 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
521 .handler = &gen_##name, \
527 /* SPR load/store helpers */
528 static inline void gen_load_spr(TCGv t, int reg)
530 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
533 static inline void gen_store_spr(int reg, TCGv t)
535 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
538 /* Invalid instruction */
539 static void gen_invalid(DisasContext *ctx)
541 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
544 static opc_handler_t invalid_handler = {
548 .handler = gen_invalid,
551 /*** Integer comparison ***/
553 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
557 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
558 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
559 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
561 l1 = gen_new_label();
562 l2 = gen_new_label();
563 l3 = gen_new_label();
565 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
566 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
568 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
569 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
571 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
574 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
577 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
581 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
583 TCGv t0 = tcg_const_local_tl(arg1);
584 gen_op_cmp(arg0, t0, s, crf);
588 #if defined(TARGET_PPC64)
589 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
592 t0 = tcg_temp_local_new();
593 t1 = tcg_temp_local_new();
595 tcg_gen_ext32s_tl(t0, arg0);
596 tcg_gen_ext32s_tl(t1, arg1);
598 tcg_gen_ext32u_tl(t0, arg0);
599 tcg_gen_ext32u_tl(t1, arg1);
601 gen_op_cmp(t0, t1, s, crf);
606 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
608 TCGv t0 = tcg_const_local_tl(arg1);
609 gen_op_cmp32(arg0, t0, s, crf);
614 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
616 #if defined(TARGET_PPC64)
618 gen_op_cmpi32(reg, 0, 1, 0);
621 gen_op_cmpi(reg, 0, 1, 0);
625 static void gen_cmp(DisasContext *ctx)
627 #if defined(TARGET_PPC64)
628 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
629 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
630 1, crfD(ctx->opcode));
633 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
634 1, crfD(ctx->opcode));
638 static void gen_cmpi(DisasContext *ctx)
640 #if defined(TARGET_PPC64)
641 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
642 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
643 1, crfD(ctx->opcode));
646 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
647 1, crfD(ctx->opcode));
651 static void gen_cmpl(DisasContext *ctx)
653 #if defined(TARGET_PPC64)
654 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
655 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
656 0, crfD(ctx->opcode));
659 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
660 0, crfD(ctx->opcode));
664 static void gen_cmpli(DisasContext *ctx)
666 #if defined(TARGET_PPC64)
667 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
668 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
669 0, crfD(ctx->opcode));
672 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
673 0, crfD(ctx->opcode));
676 /* isel (PowerPC 2.03 specification) */
677 static void gen_isel(DisasContext *ctx)
680 uint32_t bi = rC(ctx->opcode);
684 l1 = gen_new_label();
685 l2 = gen_new_label();
687 mask = 1 << (3 - (bi & 0x03));
688 t0 = tcg_temp_new_i32();
689 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
690 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
691 if (rA(ctx->opcode) == 0)
692 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
694 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
697 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
699 tcg_temp_free_i32(t0);
702 /*** Integer arithmetic ***/
704 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
705 TCGv arg1, TCGv arg2, int sub)
710 l1 = gen_new_label();
711 /* Start with XER OV disabled, the most likely case */
712 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
713 t0 = tcg_temp_local_new();
714 tcg_gen_xor_tl(t0, arg0, arg1);
715 #if defined(TARGET_PPC64)
717 tcg_gen_ext32s_tl(t0, t0);
720 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
722 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
723 tcg_gen_xor_tl(t0, arg1, arg2);
724 #if defined(TARGET_PPC64)
726 tcg_gen_ext32s_tl(t0, t0);
729 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
731 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
732 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
737 static inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1,
740 int l1 = gen_new_label();
742 #if defined(TARGET_PPC64)
743 if (!(ctx->sf_mode)) {
748 tcg_gen_ext32u_tl(t0, arg1);
749 tcg_gen_ext32u_tl(t1, arg2);
751 tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
753 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
755 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
763 tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
765 tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
767 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
772 /* Common add function */
773 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
774 TCGv arg2, int add_ca, int compute_ca,
779 if ((!compute_ca && !compute_ov) ||
780 (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
783 t0 = tcg_temp_local_new();
787 t1 = tcg_temp_local_new();
788 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
789 tcg_gen_shri_tl(t1, t1, XER_CA);
794 if (compute_ca && compute_ov) {
795 /* Start with XER CA and OV disabled, the most likely case */
796 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
797 } else if (compute_ca) {
798 /* Start with XER CA disabled, the most likely case */
799 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
800 } else if (compute_ov) {
801 /* Start with XER OV disabled, the most likely case */
802 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
805 tcg_gen_add_tl(t0, arg1, arg2);
808 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
811 tcg_gen_add_tl(t0, t0, t1);
812 gen_op_arith_compute_ca(ctx, t0, t1, 0);
816 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
819 if (unlikely(Rc(ctx->opcode) != 0))
820 gen_set_Rc0(ctx, t0);
822 if (!TCGV_EQUAL(t0, ret)) {
823 tcg_gen_mov_tl(ret, t0);
827 /* Add functions with two operands */
828 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
829 static void glue(gen_, name)(DisasContext *ctx) \
831 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
832 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
833 add_ca, compute_ca, compute_ov); \
835 /* Add functions with one operand and one immediate */
836 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
837 add_ca, compute_ca, compute_ov) \
838 static void glue(gen_, name)(DisasContext *ctx) \
840 TCGv t0 = tcg_const_local_tl(const_val); \
841 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
842 cpu_gpr[rA(ctx->opcode)], t0, \
843 add_ca, compute_ca, compute_ov); \
847 /* add add. addo addo. */
848 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
849 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
850 /* addc addc. addco addco. */
851 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
852 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
853 /* adde adde. addeo addeo. */
854 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
855 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
856 /* addme addme. addmeo addmeo. */
857 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
858 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
859 /* addze addze. addzeo addzeo.*/
860 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
861 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
863 static void gen_addi(DisasContext *ctx)
865 target_long simm = SIMM(ctx->opcode);
867 if (rA(ctx->opcode) == 0) {
869 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
871 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
875 static inline void gen_op_addic(DisasContext *ctx, TCGv ret, TCGv arg1,
878 target_long simm = SIMM(ctx->opcode);
880 /* Start with XER CA and OV disabled, the most likely case */
881 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
883 if (likely(simm != 0)) {
884 TCGv t0 = tcg_temp_local_new();
885 tcg_gen_addi_tl(t0, arg1, simm);
886 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
887 tcg_gen_mov_tl(ret, t0);
890 tcg_gen_mov_tl(ret, arg1);
893 gen_set_Rc0(ctx, ret);
897 static void gen_addic(DisasContext *ctx)
899 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
902 static void gen_addic_(DisasContext *ctx)
904 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
908 static void gen_addis(DisasContext *ctx)
910 target_long simm = SIMM(ctx->opcode);
912 if (rA(ctx->opcode) == 0) {
914 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
916 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
920 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
921 TCGv arg2, int sign, int compute_ov)
923 int l1 = gen_new_label();
924 int l2 = gen_new_label();
925 TCGv_i32 t0 = tcg_temp_local_new_i32();
926 TCGv_i32 t1 = tcg_temp_local_new_i32();
928 tcg_gen_trunc_tl_i32(t0, arg1);
929 tcg_gen_trunc_tl_i32(t1, arg2);
930 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
932 int l3 = gen_new_label();
933 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
934 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
936 tcg_gen_div_i32(t0, t0, t1);
938 tcg_gen_divu_i32(t0, t0, t1);
941 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
946 tcg_gen_sari_i32(t0, t0, 31);
948 tcg_gen_movi_i32(t0, 0);
951 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
954 tcg_gen_extu_i32_tl(ret, t0);
955 tcg_temp_free_i32(t0);
956 tcg_temp_free_i32(t1);
957 if (unlikely(Rc(ctx->opcode) != 0))
958 gen_set_Rc0(ctx, ret);
961 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
962 static void glue(gen_, name)(DisasContext *ctx) \
964 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
965 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
968 /* divwu divwu. divwuo divwuo. */
969 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
970 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
971 /* divw divw. divwo divwo. */
972 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
973 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
974 #if defined(TARGET_PPC64)
975 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
976 TCGv arg2, int sign, int compute_ov)
978 int l1 = gen_new_label();
979 int l2 = gen_new_label();
981 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
983 int l3 = gen_new_label();
984 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
985 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
987 tcg_gen_div_i64(ret, arg1, arg2);
989 tcg_gen_divu_i64(ret, arg1, arg2);
992 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
997 tcg_gen_sari_i64(ret, arg1, 63);
999 tcg_gen_movi_i64(ret, 0);
1002 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1005 if (unlikely(Rc(ctx->opcode) != 0))
1006 gen_set_Rc0(ctx, ret);
1008 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1009 static void glue(gen_, name)(DisasContext *ctx) \
1011 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1012 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1013 sign, compute_ov); \
1015 /* divwu divwu. divwuo divwuo. */
1016 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1017 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1018 /* divw divw. divwo divwo. */
1019 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1020 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1024 static void gen_mulhw(DisasContext *ctx)
1028 t0 = tcg_temp_new_i64();
1029 t1 = tcg_temp_new_i64();
1030 #if defined(TARGET_PPC64)
1031 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1032 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1033 tcg_gen_mul_i64(t0, t0, t1);
1034 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1036 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1037 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1038 tcg_gen_mul_i64(t0, t0, t1);
1039 tcg_gen_shri_i64(t0, t0, 32);
1040 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1042 tcg_temp_free_i64(t0);
1043 tcg_temp_free_i64(t1);
1044 if (unlikely(Rc(ctx->opcode) != 0))
1045 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1048 /* mulhwu mulhwu. */
1049 static void gen_mulhwu(DisasContext *ctx)
1053 t0 = tcg_temp_new_i64();
1054 t1 = tcg_temp_new_i64();
1055 #if defined(TARGET_PPC64)
1056 tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1057 tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1058 tcg_gen_mul_i64(t0, t0, t1);
1059 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1061 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1062 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1063 tcg_gen_mul_i64(t0, t0, t1);
1064 tcg_gen_shri_i64(t0, t0, 32);
1065 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1067 tcg_temp_free_i64(t0);
1068 tcg_temp_free_i64(t1);
1069 if (unlikely(Rc(ctx->opcode) != 0))
1070 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1074 static void gen_mullw(DisasContext *ctx)
1076 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1077 cpu_gpr[rB(ctx->opcode)]);
1078 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1079 if (unlikely(Rc(ctx->opcode) != 0))
1080 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1083 /* mullwo mullwo. */
1084 static void gen_mullwo(DisasContext *ctx)
1089 t0 = tcg_temp_new_i64();
1090 t1 = tcg_temp_new_i64();
1091 l1 = gen_new_label();
1092 /* Start with XER OV disabled, the most likely case */
1093 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1094 #if defined(TARGET_PPC64)
1095 tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1096 tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1098 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1099 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1101 tcg_gen_mul_i64(t0, t0, t1);
1102 #if defined(TARGET_PPC64)
1103 tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1104 tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1106 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1107 tcg_gen_ext32s_i64(t1, t0);
1108 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1110 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1112 tcg_temp_free_i64(t0);
1113 tcg_temp_free_i64(t1);
1114 if (unlikely(Rc(ctx->opcode) != 0))
1115 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1119 static void gen_mulli(DisasContext *ctx)
1121 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1124 #if defined(TARGET_PPC64)
1125 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1126 static void glue(gen_, name)(DisasContext *ctx) \
1128 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1129 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1130 if (unlikely(Rc(ctx->opcode) != 0)) \
1131 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1134 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1135 /* mulhdu mulhdu. */
1136 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1139 static void gen_mulld(DisasContext *ctx)
1141 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1142 cpu_gpr[rB(ctx->opcode)]);
1143 if (unlikely(Rc(ctx->opcode) != 0))
1144 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1146 /* mulldo mulldo. */
1147 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1150 /* neg neg. nego nego. */
1151 static inline void gen_op_arith_neg(DisasContext *ctx, TCGv ret, TCGv arg1,
1154 int l1 = gen_new_label();
1155 int l2 = gen_new_label();
1156 TCGv t0 = tcg_temp_local_new();
1157 #if defined(TARGET_PPC64)
1159 tcg_gen_mov_tl(t0, arg1);
1160 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1164 tcg_gen_ext32s_tl(t0, arg1);
1165 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1167 tcg_gen_neg_tl(ret, arg1);
1169 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1173 tcg_gen_mov_tl(ret, t0);
1175 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1179 if (unlikely(Rc(ctx->opcode) != 0))
1180 gen_set_Rc0(ctx, ret);
1183 static void gen_neg(DisasContext *ctx)
1185 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1188 static void gen_nego(DisasContext *ctx)
1190 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1193 /* Common subf function */
1194 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1195 TCGv arg2, int add_ca, int compute_ca,
1200 if ((!compute_ca && !compute_ov) ||
1201 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1204 t0 = tcg_temp_local_new();
1208 t1 = tcg_temp_local_new();
1209 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1210 tcg_gen_shri_tl(t1, t1, XER_CA);
1215 if (compute_ca && compute_ov) {
1216 /* Start with XER CA and OV disabled, the most likely case */
1217 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1218 } else if (compute_ca) {
1219 /* Start with XER CA disabled, the most likely case */
1220 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1221 } else if (compute_ov) {
1222 /* Start with XER OV disabled, the most likely case */
1223 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1227 tcg_gen_not_tl(t0, arg1);
1228 tcg_gen_add_tl(t0, t0, arg2);
1229 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1230 tcg_gen_add_tl(t0, t0, t1);
1231 gen_op_arith_compute_ca(ctx, t0, t1, 0);
1234 tcg_gen_sub_tl(t0, arg2, arg1);
1236 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1240 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1243 if (unlikely(Rc(ctx->opcode) != 0))
1244 gen_set_Rc0(ctx, t0);
1246 if (!TCGV_EQUAL(t0, ret)) {
1247 tcg_gen_mov_tl(ret, t0);
1251 /* Sub functions with Two operands functions */
1252 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1253 static void glue(gen_, name)(DisasContext *ctx) \
1255 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1256 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1257 add_ca, compute_ca, compute_ov); \
1259 /* Sub functions with one operand and one immediate */
1260 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1261 add_ca, compute_ca, compute_ov) \
1262 static void glue(gen_, name)(DisasContext *ctx) \
1264 TCGv t0 = tcg_const_local_tl(const_val); \
1265 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1266 cpu_gpr[rA(ctx->opcode)], t0, \
1267 add_ca, compute_ca, compute_ov); \
1268 tcg_temp_free(t0); \
1270 /* subf subf. subfo subfo. */
1271 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1272 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1273 /* subfc subfc. subfco subfco. */
1274 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1275 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1276 /* subfe subfe. subfeo subfo. */
1277 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1278 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1279 /* subfme subfme. subfmeo subfmeo. */
1280 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1281 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1282 /* subfze subfze. subfzeo subfzeo.*/
1283 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1284 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1287 static void gen_subfic(DisasContext *ctx)
1289 /* Start with XER CA and OV disabled, the most likely case */
1290 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1291 TCGv t0 = tcg_temp_local_new();
1292 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1293 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1294 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1296 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1300 /*** Integer logical ***/
1301 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1302 static void glue(gen_, name)(DisasContext *ctx) \
1304 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1305 cpu_gpr[rB(ctx->opcode)]); \
1306 if (unlikely(Rc(ctx->opcode) != 0)) \
1307 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1310 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1311 static void glue(gen_, name)(DisasContext *ctx) \
1313 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1314 if (unlikely(Rc(ctx->opcode) != 0)) \
1315 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1319 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1321 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1324 static void gen_andi_(DisasContext *ctx)
1326 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1327 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1331 static void gen_andis_(DisasContext *ctx)
1333 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1334 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1338 static void gen_cntlzw(DisasContext *ctx)
1340 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1341 if (unlikely(Rc(ctx->opcode) != 0))
1342 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1345 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1346 /* extsb & extsb. */
1347 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1348 /* extsh & extsh. */
1349 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1351 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1353 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1356 static void gen_or(DisasContext *ctx)
1360 rs = rS(ctx->opcode);
1361 ra = rA(ctx->opcode);
1362 rb = rB(ctx->opcode);
1363 /* Optimisation for mr. ri case */
1364 if (rs != ra || rs != rb) {
1366 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1368 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1369 if (unlikely(Rc(ctx->opcode) != 0))
1370 gen_set_Rc0(ctx, cpu_gpr[ra]);
1371 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1372 gen_set_Rc0(ctx, cpu_gpr[rs]);
1373 #if defined(TARGET_PPC64)
1379 /* Set process priority to low */
1383 /* Set process priority to medium-low */
1387 /* Set process priority to normal */
1390 #if !defined(CONFIG_USER_ONLY)
1392 if (ctx->mem_idx > 0) {
1393 /* Set process priority to very low */
1398 if (ctx->mem_idx > 0) {
1399 /* Set process priority to medium-hight */
1404 if (ctx->mem_idx > 0) {
1405 /* Set process priority to high */
1410 if (ctx->mem_idx > 1) {
1411 /* Set process priority to very high */
1421 TCGv t0 = tcg_temp_new();
1422 gen_load_spr(t0, SPR_PPR);
1423 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1424 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1425 gen_store_spr(SPR_PPR, t0);
1432 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1435 static void gen_xor(DisasContext *ctx)
1437 /* Optimisation for "set to zero" case */
1438 if (rS(ctx->opcode) != rB(ctx->opcode))
1439 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1441 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1442 if (unlikely(Rc(ctx->opcode) != 0))
1443 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1447 static void gen_ori(DisasContext *ctx)
1449 target_ulong uimm = UIMM(ctx->opcode);
1451 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1453 /* XXX: should handle special NOPs for POWER series */
1456 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1460 static void gen_oris(DisasContext *ctx)
1462 target_ulong uimm = UIMM(ctx->opcode);
1464 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1468 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1472 static void gen_xori(DisasContext *ctx)
1474 target_ulong uimm = UIMM(ctx->opcode);
1476 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1480 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1484 static void gen_xoris(DisasContext *ctx)
1486 target_ulong uimm = UIMM(ctx->opcode);
1488 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1492 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1495 /* popcntb : PowerPC 2.03 specification */
1496 static void gen_popcntb(DisasContext *ctx)
1498 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1501 static void gen_popcntw(DisasContext *ctx)
1503 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1506 #if defined(TARGET_PPC64)
1507 /* popcntd: PowerPC 2.06 specification */
1508 static void gen_popcntd(DisasContext *ctx)
1510 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1514 #if defined(TARGET_PPC64)
1515 /* extsw & extsw. */
1516 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1519 static void gen_cntlzd(DisasContext *ctx)
1521 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1522 if (unlikely(Rc(ctx->opcode) != 0))
1523 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1527 /*** Integer rotate ***/
1529 /* rlwimi & rlwimi. */
1530 static void gen_rlwimi(DisasContext *ctx)
1532 uint32_t mb, me, sh;
1534 mb = MB(ctx->opcode);
1535 me = ME(ctx->opcode);
1536 sh = SH(ctx->opcode);
1537 if (likely(sh == 0 && mb == 0 && me == 31)) {
1538 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1542 TCGv t0 = tcg_temp_new();
1543 #if defined(TARGET_PPC64)
1544 TCGv_i32 t2 = tcg_temp_new_i32();
1545 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1546 tcg_gen_rotli_i32(t2, t2, sh);
1547 tcg_gen_extu_i32_i64(t0, t2);
1548 tcg_temp_free_i32(t2);
1550 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1552 #if defined(TARGET_PPC64)
1556 mask = MASK(mb, me);
1557 t1 = tcg_temp_new();
1558 tcg_gen_andi_tl(t0, t0, mask);
1559 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1560 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1564 if (unlikely(Rc(ctx->opcode) != 0))
1565 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1568 /* rlwinm & rlwinm. */
1569 static void gen_rlwinm(DisasContext *ctx)
1571 uint32_t mb, me, sh;
1573 sh = SH(ctx->opcode);
1574 mb = MB(ctx->opcode);
1575 me = ME(ctx->opcode);
1577 if (likely(mb == 0 && me == (31 - sh))) {
1578 if (likely(sh == 0)) {
1579 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1581 TCGv t0 = tcg_temp_new();
1582 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1583 tcg_gen_shli_tl(t0, t0, sh);
1584 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1587 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1588 TCGv t0 = tcg_temp_new();
1589 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1590 tcg_gen_shri_tl(t0, t0, mb);
1591 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1594 TCGv t0 = tcg_temp_new();
1595 #if defined(TARGET_PPC64)
1596 TCGv_i32 t1 = tcg_temp_new_i32();
1597 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1598 tcg_gen_rotli_i32(t1, t1, sh);
1599 tcg_gen_extu_i32_i64(t0, t1);
1600 tcg_temp_free_i32(t1);
1602 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1604 #if defined(TARGET_PPC64)
1608 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1611 if (unlikely(Rc(ctx->opcode) != 0))
1612 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1615 /* rlwnm & rlwnm. */
1616 static void gen_rlwnm(DisasContext *ctx)
1620 #if defined(TARGET_PPC64)
1624 mb = MB(ctx->opcode);
1625 me = ME(ctx->opcode);
1626 t0 = tcg_temp_new();
1627 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1628 #if defined(TARGET_PPC64)
1629 t1 = tcg_temp_new_i32();
1630 t2 = tcg_temp_new_i32();
1631 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1632 tcg_gen_trunc_i64_i32(t2, t0);
1633 tcg_gen_rotl_i32(t1, t1, t2);
1634 tcg_gen_extu_i32_i64(t0, t1);
1635 tcg_temp_free_i32(t1);
1636 tcg_temp_free_i32(t2);
1638 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1640 if (unlikely(mb != 0 || me != 31)) {
1641 #if defined(TARGET_PPC64)
1645 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1647 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1650 if (unlikely(Rc(ctx->opcode) != 0))
1651 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1654 #if defined(TARGET_PPC64)
1655 #define GEN_PPC64_R2(name, opc1, opc2) \
1656 static void glue(gen_, name##0)(DisasContext *ctx) \
1658 gen_##name(ctx, 0); \
1661 static void glue(gen_, name##1)(DisasContext *ctx) \
1663 gen_##name(ctx, 1); \
1665 #define GEN_PPC64_R4(name, opc1, opc2) \
1666 static void glue(gen_, name##0)(DisasContext *ctx) \
1668 gen_##name(ctx, 0, 0); \
1671 static void glue(gen_, name##1)(DisasContext *ctx) \
1673 gen_##name(ctx, 0, 1); \
1676 static void glue(gen_, name##2)(DisasContext *ctx) \
1678 gen_##name(ctx, 1, 0); \
1681 static void glue(gen_, name##3)(DisasContext *ctx) \
1683 gen_##name(ctx, 1, 1); \
1686 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1689 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1690 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1691 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1692 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1694 TCGv t0 = tcg_temp_new();
1695 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1696 if (likely(mb == 0 && me == 63)) {
1697 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1699 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1703 if (unlikely(Rc(ctx->opcode) != 0))
1704 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1706 /* rldicl - rldicl. */
1707 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1711 sh = SH(ctx->opcode) | (shn << 5);
1712 mb = MB(ctx->opcode) | (mbn << 5);
1713 gen_rldinm(ctx, mb, 63, sh);
1715 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1716 /* rldicr - rldicr. */
1717 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1721 sh = SH(ctx->opcode) | (shn << 5);
1722 me = MB(ctx->opcode) | (men << 5);
1723 gen_rldinm(ctx, 0, me, sh);
1725 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1726 /* rldic - rldic. */
1727 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1731 sh = SH(ctx->opcode) | (shn << 5);
1732 mb = MB(ctx->opcode) | (mbn << 5);
1733 gen_rldinm(ctx, mb, 63 - sh, sh);
1735 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1737 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1741 mb = MB(ctx->opcode);
1742 me = ME(ctx->opcode);
1743 t0 = tcg_temp_new();
1744 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1745 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1746 if (unlikely(mb != 0 || me != 63)) {
1747 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1749 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1752 if (unlikely(Rc(ctx->opcode) != 0))
1753 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1756 /* rldcl - rldcl. */
1757 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1761 mb = MB(ctx->opcode) | (mbn << 5);
1762 gen_rldnm(ctx, mb, 63);
1764 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1765 /* rldcr - rldcr. */
1766 static inline void gen_rldcr(DisasContext *ctx, int men)
1770 me = MB(ctx->opcode) | (men << 5);
1771 gen_rldnm(ctx, 0, me);
1773 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1774 /* rldimi - rldimi. */
1775 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1777 uint32_t sh, mb, me;
1779 sh = SH(ctx->opcode) | (shn << 5);
1780 mb = MB(ctx->opcode) | (mbn << 5);
1782 if (unlikely(sh == 0 && mb == 0)) {
1783 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1788 t0 = tcg_temp_new();
1789 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1790 t1 = tcg_temp_new();
1791 mask = MASK(mb, me);
1792 tcg_gen_andi_tl(t0, t0, mask);
1793 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1794 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1798 if (unlikely(Rc(ctx->opcode) != 0))
1799 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1801 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1804 /*** Integer shift ***/
1807 static void gen_slw(DisasContext *ctx)
1811 t0 = tcg_temp_new();
1812 /* AND rS with a mask that is 0 when rB >= 0x20 */
1813 #if defined(TARGET_PPC64)
1814 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1815 tcg_gen_sari_tl(t0, t0, 0x3f);
1817 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1818 tcg_gen_sari_tl(t0, t0, 0x1f);
1820 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1821 t1 = tcg_temp_new();
1822 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1823 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1826 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1827 if (unlikely(Rc(ctx->opcode) != 0))
1828 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1832 static void gen_sraw(DisasContext *ctx)
1834 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1835 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1836 if (unlikely(Rc(ctx->opcode) != 0))
1837 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1840 /* srawi & srawi. */
1841 static void gen_srawi(DisasContext *ctx)
1843 int sh = SH(ctx->opcode);
1847 l1 = gen_new_label();
1848 l2 = gen_new_label();
1849 t0 = tcg_temp_local_new();
1850 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1851 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1852 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1853 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1854 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1857 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1859 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1860 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
1863 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1864 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1866 if (unlikely(Rc(ctx->opcode) != 0))
1867 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1871 static void gen_srw(DisasContext *ctx)
1875 t0 = tcg_temp_new();
1876 /* AND rS with a mask that is 0 when rB >= 0x20 */
1877 #if defined(TARGET_PPC64)
1878 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1879 tcg_gen_sari_tl(t0, t0, 0x3f);
1881 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1882 tcg_gen_sari_tl(t0, t0, 0x1f);
1884 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1885 tcg_gen_ext32u_tl(t0, t0);
1886 t1 = tcg_temp_new();
1887 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1888 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1891 if (unlikely(Rc(ctx->opcode) != 0))
1892 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1895 #if defined(TARGET_PPC64)
1897 static void gen_sld(DisasContext *ctx)
1901 t0 = tcg_temp_new();
1902 /* AND rS with a mask that is 0 when rB >= 0x40 */
1903 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1904 tcg_gen_sari_tl(t0, t0, 0x3f);
1905 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1906 t1 = tcg_temp_new();
1907 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1908 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1911 if (unlikely(Rc(ctx->opcode) != 0))
1912 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1916 static void gen_srad(DisasContext *ctx)
1918 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
1919 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1920 if (unlikely(Rc(ctx->opcode) != 0))
1921 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1923 /* sradi & sradi. */
1924 static inline void gen_sradi(DisasContext *ctx, int n)
1926 int sh = SH(ctx->opcode) + (n << 5);
1930 l1 = gen_new_label();
1931 l2 = gen_new_label();
1932 t0 = tcg_temp_local_new();
1933 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
1934 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1935 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1936 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1939 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1942 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1944 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1945 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1947 if (unlikely(Rc(ctx->opcode) != 0))
1948 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1951 static void gen_sradi0(DisasContext *ctx)
1956 static void gen_sradi1(DisasContext *ctx)
1962 static void gen_srd(DisasContext *ctx)
1966 t0 = tcg_temp_new();
1967 /* AND rS with a mask that is 0 when rB >= 0x40 */
1968 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1969 tcg_gen_sari_tl(t0, t0, 0x3f);
1970 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1971 t1 = tcg_temp_new();
1972 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1973 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1976 if (unlikely(Rc(ctx->opcode) != 0))
1977 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1981 /*** Floating-Point arithmetic ***/
1982 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1983 static void gen_f##name(DisasContext *ctx) \
1985 if (unlikely(!ctx->fpu_enabled)) { \
1986 gen_exception(ctx, POWERPC_EXCP_FPU); \
1989 /* NIP cannot be restored if the memory exception comes from an helper */ \
1990 gen_update_nip(ctx, ctx->nip - 4); \
1991 gen_reset_fpstatus(); \
1992 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
1993 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
1995 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
1997 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
1998 Rc(ctx->opcode) != 0); \
2001 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2002 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2003 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
2005 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2006 static void gen_f##name(DisasContext *ctx) \
2008 if (unlikely(!ctx->fpu_enabled)) { \
2009 gen_exception(ctx, POWERPC_EXCP_FPU); \
2012 /* NIP cannot be restored if the memory exception comes from an helper */ \
2013 gen_update_nip(ctx, ctx->nip - 4); \
2014 gen_reset_fpstatus(); \
2015 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2016 cpu_fpr[rB(ctx->opcode)]); \
2018 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2020 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2021 set_fprf, Rc(ctx->opcode) != 0); \
2023 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2024 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2025 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2027 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2028 static void gen_f##name(DisasContext *ctx) \
2030 if (unlikely(!ctx->fpu_enabled)) { \
2031 gen_exception(ctx, POWERPC_EXCP_FPU); \
2034 /* NIP cannot be restored if the memory exception comes from an helper */ \
2035 gen_update_nip(ctx, ctx->nip - 4); \
2036 gen_reset_fpstatus(); \
2037 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2038 cpu_fpr[rC(ctx->opcode)]); \
2040 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2042 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2043 set_fprf, Rc(ctx->opcode) != 0); \
2045 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2046 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2047 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2049 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2050 static void gen_f##name(DisasContext *ctx) \
2052 if (unlikely(!ctx->fpu_enabled)) { \
2053 gen_exception(ctx, POWERPC_EXCP_FPU); \
2056 /* NIP cannot be restored if the memory exception comes from an helper */ \
2057 gen_update_nip(ctx, ctx->nip - 4); \
2058 gen_reset_fpstatus(); \
2059 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2060 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2061 set_fprf, Rc(ctx->opcode) != 0); \
2064 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2065 static void gen_f##name(DisasContext *ctx) \
2067 if (unlikely(!ctx->fpu_enabled)) { \
2068 gen_exception(ctx, POWERPC_EXCP_FPU); \
2071 /* NIP cannot be restored if the memory exception comes from an helper */ \
2072 gen_update_nip(ctx, ctx->nip - 4); \
2073 gen_reset_fpstatus(); \
2074 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2075 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2076 set_fprf, Rc(ctx->opcode) != 0); \
2080 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2082 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2084 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2087 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2090 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2093 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2096 static void gen_frsqrtes(DisasContext *ctx)
2098 if (unlikely(!ctx->fpu_enabled)) {
2099 gen_exception(ctx, POWERPC_EXCP_FPU);
2102 /* NIP cannot be restored if the memory exception comes from an helper */
2103 gen_update_nip(ctx, ctx->nip - 4);
2104 gen_reset_fpstatus();
2105 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2106 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2107 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2111 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2113 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2117 static void gen_fsqrt(DisasContext *ctx)
2119 if (unlikely(!ctx->fpu_enabled)) {
2120 gen_exception(ctx, POWERPC_EXCP_FPU);
2123 /* NIP cannot be restored if the memory exception comes from an helper */
2124 gen_update_nip(ctx, ctx->nip - 4);
2125 gen_reset_fpstatus();
2126 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2127 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2130 static void gen_fsqrts(DisasContext *ctx)
2132 if (unlikely(!ctx->fpu_enabled)) {
2133 gen_exception(ctx, POWERPC_EXCP_FPU);
2136 /* NIP cannot be restored if the memory exception comes from an helper */
2137 gen_update_nip(ctx, ctx->nip - 4);
2138 gen_reset_fpstatus();
2139 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2140 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2141 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2144 /*** Floating-Point multiply-and-add ***/
2145 /* fmadd - fmadds */
2146 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2147 /* fmsub - fmsubs */
2148 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2149 /* fnmadd - fnmadds */
2150 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2151 /* fnmsub - fnmsubs */
2152 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2154 /*** Floating-Point round & convert ***/
2156 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2158 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2160 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2161 #if defined(TARGET_PPC64)
2163 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2165 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2167 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2171 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2173 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2175 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2177 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2179 /*** Floating-Point compare ***/
2182 static void gen_fcmpo(DisasContext *ctx)
2185 if (unlikely(!ctx->fpu_enabled)) {
2186 gen_exception(ctx, POWERPC_EXCP_FPU);
2189 /* NIP cannot be restored if the memory exception comes from an helper */
2190 gen_update_nip(ctx, ctx->nip - 4);
2191 gen_reset_fpstatus();
2192 crf = tcg_const_i32(crfD(ctx->opcode));
2193 gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2194 tcg_temp_free_i32(crf);
2195 gen_helper_float_check_status();
2199 static void gen_fcmpu(DisasContext *ctx)
2202 if (unlikely(!ctx->fpu_enabled)) {
2203 gen_exception(ctx, POWERPC_EXCP_FPU);
2206 /* NIP cannot be restored if the memory exception comes from an helper */
2207 gen_update_nip(ctx, ctx->nip - 4);
2208 gen_reset_fpstatus();
2209 crf = tcg_const_i32(crfD(ctx->opcode));
2210 gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2211 tcg_temp_free_i32(crf);
2212 gen_helper_float_check_status();
2215 /*** Floating-point move ***/
2217 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
2218 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2221 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
2222 static void gen_fmr(DisasContext *ctx)
2224 if (unlikely(!ctx->fpu_enabled)) {
2225 gen_exception(ctx, POWERPC_EXCP_FPU);
2228 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2229 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2233 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
2234 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2236 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
2237 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2239 /*** Floating-Point status & ctrl register ***/
2242 static void gen_mcrfs(DisasContext *ctx)
2246 if (unlikely(!ctx->fpu_enabled)) {
2247 gen_exception(ctx, POWERPC_EXCP_FPU);
2250 bfa = 4 * (7 - crfS(ctx->opcode));
2251 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2252 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2253 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2257 static void gen_mffs(DisasContext *ctx)
2259 if (unlikely(!ctx->fpu_enabled)) {
2260 gen_exception(ctx, POWERPC_EXCP_FPU);
2263 gen_reset_fpstatus();
2264 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2265 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2269 static void gen_mtfsb0(DisasContext *ctx)
2273 if (unlikely(!ctx->fpu_enabled)) {
2274 gen_exception(ctx, POWERPC_EXCP_FPU);
2277 crb = 31 - crbD(ctx->opcode);
2278 gen_reset_fpstatus();
2279 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2281 /* NIP cannot be restored if the memory exception comes from an helper */
2282 gen_update_nip(ctx, ctx->nip - 4);
2283 t0 = tcg_const_i32(crb);
2284 gen_helper_fpscr_clrbit(t0);
2285 tcg_temp_free_i32(t0);
2287 if (unlikely(Rc(ctx->opcode) != 0)) {
2288 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2293 static void gen_mtfsb1(DisasContext *ctx)
2297 if (unlikely(!ctx->fpu_enabled)) {
2298 gen_exception(ctx, POWERPC_EXCP_FPU);
2301 crb = 31 - crbD(ctx->opcode);
2302 gen_reset_fpstatus();
2303 /* XXX: we pretend we can only do IEEE floating-point computations */
2304 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2306 /* NIP cannot be restored if the memory exception comes from a helper */
2307 gen_update_nip(ctx, ctx->nip - 4);
2308 t0 = tcg_const_i32(crb);
2309 gen_helper_fpscr_setbit(t0);
2310 tcg_temp_free_i32(t0);
2312 if (unlikely(Rc(ctx->opcode) != 0)) {
2313 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2315 /* We can raise a deferred exception */
2316 gen_helper_float_check_status();
2320 static void gen_mtfsf(DisasContext *ctx)
2323 int L = ctx->opcode & 0x02000000;
2325 if (unlikely(!ctx->fpu_enabled)) {
2326 gen_exception(ctx, POWERPC_EXCP_FPU);
2329 /* NIP cannot be restored if the memory exception comes from a helper */
2330 gen_update_nip(ctx, ctx->nip - 4);
2331 gen_reset_fpstatus();
2333 t0 = tcg_const_i32(0xff);
2335 t0 = tcg_const_i32(FM(ctx->opcode));
2336 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2337 tcg_temp_free_i32(t0);
2338 if (unlikely(Rc(ctx->opcode) != 0)) {
2339 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2341 /* We can raise a deferred exception */
2342 gen_helper_float_check_status();
2346 static void gen_mtfsfi(DisasContext *ctx)
2352 if (unlikely(!ctx->fpu_enabled)) {
2353 gen_exception(ctx, POWERPC_EXCP_FPU);
2356 bf = crbD(ctx->opcode) >> 2;
2358 /* NIP cannot be restored if the memory exception comes from a helper */
2359 gen_update_nip(ctx, ctx->nip - 4);
2360 gen_reset_fpstatus();
2361 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2362 t1 = tcg_const_i32(1 << sh);
2363 gen_helper_store_fpscr(t0, t1);
2364 tcg_temp_free_i64(t0);
2365 tcg_temp_free_i32(t1);
2366 if (unlikely(Rc(ctx->opcode) != 0)) {
2367 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2369 /* We can raise a deferred exception */
2370 gen_helper_float_check_status();
2373 /*** Addressing modes ***/
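/* All effective address computations below treat rA == 0 as a literal zero
 * rather than the contents of GPR0.  On 64-bit implementations running in
 * 32-bit mode (sf_mode clear), the computed EA is truncated to 32 bits, as
 * done by the ext32u operations.
 */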
2374 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2375 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2378 target_long simm = SIMM(ctx->opcode);
2381 if (rA(ctx->opcode) == 0) {
2382 #if defined(TARGET_PPC64)
2383 if (!ctx->sf_mode) {
2384 tcg_gen_movi_tl(EA, (uint32_t)simm);
2387 tcg_gen_movi_tl(EA, simm);
2388 } else if (likely(simm != 0)) {
2389 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2390 #if defined(TARGET_PPC64)
2391 if (!ctx->sf_mode) {
2392 tcg_gen_ext32u_tl(EA, EA);
2396 #if defined(TARGET_PPC64)
2397 if (!ctx->sf_mode) {
2398 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2401 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2405 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2407 if (rA(ctx->opcode) == 0) {
2408 #if defined(TARGET_PPC64)
2409 if (!ctx->sf_mode) {
2410 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2413 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2415 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2416 #if defined(TARGET_PPC64)
2417 if (!ctx->sf_mode) {
2418 tcg_gen_ext32u_tl(EA, EA);
2424 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2426 if (rA(ctx->opcode) == 0) {
2427 tcg_gen_movi_tl(EA, 0);
2429 #if defined(TARGET_PPC64)
2430 if (!ctx->sf_mode) {
2431 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2434 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2438 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2441 tcg_gen_addi_tl(ret, arg1, val);
2442 #if defined(TARGET_PPC64)
2443 if (!ctx->sf_mode) {
2444 tcg_gen_ext32u_tl(ret, ret);
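/* Raise an alignment interrupt if any of the EA bits selected by 'mask' are
 * set; used by instructions such as lwarx/stwcx. and eciwx/ecowx that require
 * naturally aligned operands.
 */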
2449 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2451 int l1 = gen_new_label();
2452 TCGv t0 = tcg_temp_new();
2454 /* NIP cannot be restored if the memory exception comes from a helper */
2455 gen_update_nip(ctx, ctx->nip - 4);
2456 tcg_gen_andi_tl(t0, EA, mask);
2457 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2458 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2459 t2 = tcg_const_i32(0);
2460 gen_helper_raise_exception_err(t1, t2);
2461 tcg_temp_free_i32(t1);
2462 tcg_temp_free_i32(t2);
2467 /*** Integer load ***/
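/* The gen_qemu_ldxx/stxx wrappers assume a big-endian guest memory layout;
 * when the CPU runs in little-endian mode (ctx->le_mode) the loaded or stored
 * value is byte-swapped to compensate.
 */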
2468 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2470 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2473 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2475 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2478 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2480 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2481 if (unlikely(ctx->le_mode)) {
2482 tcg_gen_bswap16_tl(arg1, arg1);
2486 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2488 if (unlikely(ctx->le_mode)) {
2489 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2490 tcg_gen_bswap16_tl(arg1, arg1);
2491 tcg_gen_ext16s_tl(arg1, arg1);
2493 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2497 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2499 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2500 if (unlikely(ctx->le_mode)) {
2501 tcg_gen_bswap32_tl(arg1, arg1);
2505 #if defined(TARGET_PPC64)
2506 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2508 if (unlikely(ctx->le_mode)) {
2509 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2510 tcg_gen_bswap32_tl(arg1, arg1);
2511 tcg_gen_ext32s_tl(arg1, arg1);
2513 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2517 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2519 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2520 if (unlikely(ctx->le_mode)) {
2521 tcg_gen_bswap64_i64(arg1, arg1);
2525 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2527 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2530 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2532 if (unlikely(ctx->le_mode)) {
2533 TCGv t0 = tcg_temp_new();
2534 tcg_gen_ext16u_tl(t0, arg1);
2535 tcg_gen_bswap16_tl(t0, t0);
2536 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2539 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2543 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2545 if (unlikely(ctx->le_mode)) {
2546 TCGv t0 = tcg_temp_new();
2547 tcg_gen_ext32u_tl(t0, arg1);
2548 tcg_gen_bswap32_tl(t0, t0);
2549 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2552 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2556 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2558 if (unlikely(ctx->le_mode)) {
2559 TCGv_i64 t0 = tcg_temp_new_i64();
2560 tcg_gen_bswap64_i64(t0, arg1);
2561 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2562 tcg_temp_free_i64(t0);
2564 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
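/* The GEN_LD / GEN_LDU / GEN_LDUX / GEN_LDX macros generate the D-form,
 * D-form-with-update, X-form-with-update and X-form loads respectively.
 * The update forms are invalid when rA == 0 or rA == rD; otherwise they write
 * the computed EA back into rA.  GEN_LDS instantiates all four variants.
 */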
2567 #define GEN_LD(name, ldop, opc, type) \
2568 static void glue(gen_, name)(DisasContext *ctx) \
2571 gen_set_access_type(ctx, ACCESS_INT); \
2572 EA = tcg_temp_new(); \
2573 gen_addr_imm_index(ctx, EA, 0); \
2574 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2575 tcg_temp_free(EA); \
2578 #define GEN_LDU(name, ldop, opc, type) \
2579 static void glue(gen_, name##u)(DisasContext *ctx) \
2582 if (unlikely(rA(ctx->opcode) == 0 || \
2583 rA(ctx->opcode) == rD(ctx->opcode))) { \
2584 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2587 gen_set_access_type(ctx, ACCESS_INT); \
2588 EA = tcg_temp_new(); \
2589 if (type == PPC_64B) \
2590 gen_addr_imm_index(ctx, EA, 0x03); \
2592 gen_addr_imm_index(ctx, EA, 0); \
2593 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2594 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2595 tcg_temp_free(EA); \
2598 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2599 static void glue(gen_, name##ux)(DisasContext *ctx) \
2602 if (unlikely(rA(ctx->opcode) == 0 || \
2603 rA(ctx->opcode) == rD(ctx->opcode))) { \
2604 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2607 gen_set_access_type(ctx, ACCESS_INT); \
2608 EA = tcg_temp_new(); \
2609 gen_addr_reg_index(ctx, EA); \
2610 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2611 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2612 tcg_temp_free(EA); \
2615 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2616 static void glue(gen_, name##x)(DisasContext *ctx) \
2619 gen_set_access_type(ctx, ACCESS_INT); \
2620 EA = tcg_temp_new(); \
2621 gen_addr_reg_index(ctx, EA); \
2622 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2623 tcg_temp_free(EA); \
2626 #define GEN_LDS(name, ldop, op, type) \
2627 GEN_LD(name, ldop, op | 0x20, type); \
2628 GEN_LDU(name, ldop, op | 0x21, type); \
2629 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2630 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2632 /* lbz lbzu lbzux lbzx */
2633 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2634 /* lha lhau lhaux lhax */
2635 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2636 /* lhz lhzu lhzux lhzx */
2637 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2638 /* lwz lwzu lwzux lwzx */
2639 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2640 #if defined(TARGET_PPC64)
2642 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2644 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2646 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2648 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2650 static void gen_ld(DisasContext *ctx)
2653 if (Rc(ctx->opcode)) {
2654 if (unlikely(rA(ctx->opcode) == 0 ||
2655 rA(ctx->opcode) == rD(ctx->opcode))) {
2656 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2660 gen_set_access_type(ctx, ACCESS_INT);
2661 EA = tcg_temp_new();
2662 gen_addr_imm_index(ctx, EA, 0x03);
2663 if (ctx->opcode & 0x02) {
2664 /* lwa (lwau is undefined) */
2665 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2668 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2670 if (Rc(ctx->opcode))
2671 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2676 static void gen_lq(DisasContext *ctx)
2678 #if defined(CONFIG_USER_ONLY)
2679 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2684 /* lq is privileged: not available in user mode */
2685 if (unlikely(ctx->mem_idx == 0)) {
2686 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2689 ra = rA(ctx->opcode);
2690 rd = rD(ctx->opcode);
2691 if (unlikely((rd & 1) || rd == ra)) {
2692 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2695 if (unlikely(ctx->le_mode)) {
2696 /* Little-endian mode is not handled */
2697 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2700 gen_set_access_type(ctx, ACCESS_INT);
2701 EA = tcg_temp_new();
2702 gen_addr_imm_index(ctx, EA, 0x0F);
2703 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2704 gen_addr_add(ctx, EA, EA, 8);
2705 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2711 /*** Integer store ***/
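/* The store macros mirror the load macros above, using rS as the source
 * register; the update forms only reject rA == 0, since rS may legitimately
 * equal rA.
 */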
2712 #define GEN_ST(name, stop, opc, type) \
2713 static void glue(gen_, name)(DisasContext *ctx) \
2716 gen_set_access_type(ctx, ACCESS_INT); \
2717 EA = tcg_temp_new(); \
2718 gen_addr_imm_index(ctx, EA, 0); \
2719 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2720 tcg_temp_free(EA); \
2723 #define GEN_STU(name, stop, opc, type) \
2724 static void glue(gen_, stop##u)(DisasContext *ctx) \
2727 if (unlikely(rA(ctx->opcode) == 0)) { \
2728 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2731 gen_set_access_type(ctx, ACCESS_INT); \
2732 EA = tcg_temp_new(); \
2733 if (type == PPC_64B) \
2734 gen_addr_imm_index(ctx, EA, 0x03); \
2736 gen_addr_imm_index(ctx, EA, 0); \
2737 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2738 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2739 tcg_temp_free(EA); \
2742 #define GEN_STUX(name, stop, opc2, opc3, type) \
2743 static void glue(gen_, name##ux)(DisasContext *ctx) \
2746 if (unlikely(rA(ctx->opcode) == 0)) { \
2747 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2750 gen_set_access_type(ctx, ACCESS_INT); \
2751 EA = tcg_temp_new(); \
2752 gen_addr_reg_index(ctx, EA); \
2753 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2754 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2755 tcg_temp_free(EA); \
2758 #define GEN_STX(name, stop, opc2, opc3, type) \
2759 static void glue(gen_, name##x)(DisasContext *ctx) \
2762 gen_set_access_type(ctx, ACCESS_INT); \
2763 EA = tcg_temp_new(); \
2764 gen_addr_reg_index(ctx, EA); \
2765 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2766 tcg_temp_free(EA); \
2769 #define GEN_STS(name, stop, op, type) \
2770 GEN_ST(name, stop, op | 0x20, type); \
2771 GEN_STU(name, stop, op | 0x21, type); \
2772 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2773 GEN_STX(name, stop, 0x17, op | 0x00, type)
2775 /* stb stbu stbux stbx */
2776 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2777 /* sth sthu sthux sthx */
2778 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2779 /* stw stwu stwux stwx */
2780 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2781 #if defined(TARGET_PPC64)
2782 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2783 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2785 static void gen_std(DisasContext *ctx)
2790 rs = rS(ctx->opcode);
2791 if ((ctx->opcode & 0x3) == 0x2) {
2792 #if defined(CONFIG_USER_ONLY)
2793 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2796 if (unlikely(ctx->mem_idx == 0)) {
2797 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2800 if (unlikely(rs & 1)) {
2801 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2804 if (unlikely(ctx->le_mode)) {
2805 /* Little-endian mode is not handled */
2806 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2809 gen_set_access_type(ctx, ACCESS_INT);
2810 EA = tcg_temp_new();
2811 gen_addr_imm_index(ctx, EA, 0x03);
2812 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2813 gen_addr_add(ctx, EA, EA, 8);
2814 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2819 if (Rc(ctx->opcode)) {
2820 if (unlikely(rA(ctx->opcode) == 0)) {
2821 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2825 gen_set_access_type(ctx, ACCESS_INT);
2826 EA = tcg_temp_new();
2827 gen_addr_imm_index(ctx, EA, 0x03);
2828 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2829 if (Rc(ctx->opcode))
2830 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2835 /*** Integer load and store with byte reverse ***/
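/* Byte-reversed accesses swap only when the CPU is *not* in little-endian
 * mode: the plain accessors above already swap in little-endian mode, so the
 * two swaps would otherwise cancel out.
 */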
2837 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2839 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2840 if (likely(!ctx->le_mode)) {
2841 tcg_gen_bswap16_tl(arg1, arg1);
2844 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2847 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2849 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2850 if (likely(!ctx->le_mode)) {
2851 tcg_gen_bswap32_tl(arg1, arg1);
2854 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2857 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2859 if (likely(!ctx->le_mode)) {
2860 TCGv t0 = tcg_temp_new();
2861 tcg_gen_ext16u_tl(t0, arg1);
2862 tcg_gen_bswap16_tl(t0, t0);
2863 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2866 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2869 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2872 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2874 if (likely(!ctx->le_mode)) {
2875 TCGv t0 = tcg_temp_new();
2876 tcg_gen_ext32u_tl(t0, arg1);
2877 tcg_gen_bswap32_tl(t0, t0);
2878 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2881 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2884 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2886 /*** Integer load and store multiple ***/
2889 static void gen_lmw(DisasContext *ctx)
2893 gen_set_access_type(ctx, ACCESS_INT);
2894 /* NIP cannot be restored if the memory exception comes from a helper */
2895 gen_update_nip(ctx, ctx->nip - 4);
2896 t0 = tcg_temp_new();
2897 t1 = tcg_const_i32(rD(ctx->opcode));
2898 gen_addr_imm_index(ctx, t0, 0);
2899 gen_helper_lmw(t0, t1);
2901 tcg_temp_free_i32(t1);
2905 static void gen_stmw(DisasContext *ctx)
2909 gen_set_access_type(ctx, ACCESS_INT);
2910 /* NIP cannot be restored if the memory exception comes from a helper */
2911 gen_update_nip(ctx, ctx->nip - 4);
2912 t0 = tcg_temp_new();
2913 t1 = tcg_const_i32(rS(ctx->opcode));
2914 gen_addr_imm_index(ctx, t0, 0);
2915 gen_helper_stmw(t0, t1);
2917 tcg_temp_free_i32(t1);
2920 /*** Integer load and store strings ***/
2923 /* PowerPC32 specification says we must generate an exception if
2924 * rA is in the range of registers to be loaded.
2925 * On the other hand, IBM says this is valid, but rA won't be loaded.
2926 * For now, I'll follow the spec...
2928 static void gen_lswi(DisasContext *ctx)
2932 int nb = NB(ctx->opcode);
2933 int start = rD(ctx->opcode);
2934 int ra = rA(ctx->opcode);
2940 if (unlikely(((start + nr) > 32 &&
2941 start <= ra && (start + nr - 32) > ra) ||
2942 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2943 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2946 gen_set_access_type(ctx, ACCESS_INT);
2947 /* NIP cannot be restored if the memory exception comes from a helper */
2948 gen_update_nip(ctx, ctx->nip - 4);
2949 t0 = tcg_temp_new();
2950 gen_addr_register(ctx, t0);
2951 t1 = tcg_const_i32(nb);
2952 t2 = tcg_const_i32(start);
2953 gen_helper_lsw(t0, t1, t2);
2955 tcg_temp_free_i32(t1);
2956 tcg_temp_free_i32(t2);
2960 static void gen_lswx(DisasContext *ctx)
2963 TCGv_i32 t1, t2, t3;
2964 gen_set_access_type(ctx, ACCESS_INT);
2965 /* NIP cannot be restored if the memory exception comes from a helper */
2966 gen_update_nip(ctx, ctx->nip - 4);
2967 t0 = tcg_temp_new();
2968 gen_addr_reg_index(ctx, t0);
2969 t1 = tcg_const_i32(rD(ctx->opcode));
2970 t2 = tcg_const_i32(rA(ctx->opcode));
2971 t3 = tcg_const_i32(rB(ctx->opcode));
2972 gen_helper_lswx(t0, t1, t2, t3);
2974 tcg_temp_free_i32(t1);
2975 tcg_temp_free_i32(t2);
2976 tcg_temp_free_i32(t3);
2980 static void gen_stswi(DisasContext *ctx)
2984 int nb = NB(ctx->opcode);
2985 gen_set_access_type(ctx, ACCESS_INT);
2986 /* NIP cannot be restored if the memory exception comes from a helper */
2987 gen_update_nip(ctx, ctx->nip - 4);
2988 t0 = tcg_temp_new();
2989 gen_addr_register(ctx, t0);
2992 t1 = tcg_const_i32(nb);
2993 t2 = tcg_const_i32(rS(ctx->opcode));
2994 gen_helper_stsw(t0, t1, t2);
2996 tcg_temp_free_i32(t1);
2997 tcg_temp_free_i32(t2);
3001 static void gen_stswx(DisasContext *ctx)
3005 gen_set_access_type(ctx, ACCESS_INT);
3006 /* NIP cannot be restored if the memory exception comes from a helper */
3007 gen_update_nip(ctx, ctx->nip - 4);
3008 t0 = tcg_temp_new();
3009 gen_addr_reg_index(ctx, t0);
3010 t1 = tcg_temp_new_i32();
3011 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3012 tcg_gen_andi_i32(t1, t1, 0x7F);
3013 t2 = tcg_const_i32(rS(ctx->opcode));
3014 gen_helper_stsw(t0, t1, t2);
3016 tcg_temp_free_i32(t1);
3017 tcg_temp_free_i32(t2);
3020 /*** Memory synchronisation ***/
3022 static void gen_eieio(DisasContext *ctx)
3027 static void gen_isync(DisasContext *ctx)
3029 gen_stop_exception(ctx);
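/* Load-reserve / store-conditional: lwarx (and ldarx) record the aligned EA
 * in cpu_reserve and the loaded value in reserve_val.  stwcx. and stdcx.
 * succeed only if the EA still matches cpu_reserve: CR0[EQ] reports the
 * outcome (with XER[SO] copied in), and the reservation is then cleared by
 * setting cpu_reserve to -1.
 */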
3033 static void gen_lwarx(DisasContext *ctx)
3036 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3037 gen_set_access_type(ctx, ACCESS_RES);
3038 t0 = tcg_temp_local_new();
3039 gen_addr_reg_index(ctx, t0);
3040 gen_check_align(ctx, t0, 0x03);
3041 gen_qemu_ld32u(ctx, gpr, t0);
3042 tcg_gen_mov_tl(cpu_reserve, t0);
3043 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3047 #if defined(CONFIG_USER_ONLY)
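/* In user mode the conditional store is not performed inline: the EA, the
 * register number and the access size are stashed in the CPU state and a
 * POWERPC_EXCP_STCX exception is raised, presumably so the store can be
 * completed atomically outside of translated code.
 */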
3048 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3051 TCGv t0 = tcg_temp_new();
3052 uint32_t save_exception = ctx->exception;
3054 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUState, reserve_ea));
3055 tcg_gen_movi_tl(t0, (size << 5) | reg);
3056 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, reserve_info));
3058 gen_update_nip(ctx, ctx->nip-4);
3059 ctx->exception = POWERPC_EXCP_BRANCH;
3060 gen_exception(ctx, POWERPC_EXCP_STCX);
3061 ctx->exception = save_exception;
3066 static void gen_stwcx_(DisasContext *ctx)
3069 gen_set_access_type(ctx, ACCESS_RES);
3070 t0 = tcg_temp_local_new();
3071 gen_addr_reg_index(ctx, t0);
3072 gen_check_align(ctx, t0, 0x03);
3073 #if defined(CONFIG_USER_ONLY)
3074 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3079 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3080 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3081 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3082 l1 = gen_new_label();
3083 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3084 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3085 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3087 tcg_gen_movi_tl(cpu_reserve, -1);
3093 #if defined(TARGET_PPC64)
3095 static void gen_ldarx(DisasContext *ctx)
3098 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3099 gen_set_access_type(ctx, ACCESS_RES);
3100 t0 = tcg_temp_local_new();
3101 gen_addr_reg_index(ctx, t0);
3102 gen_check_align(ctx, t0, 0x07);
3103 gen_qemu_ld64(ctx, gpr, t0);
3104 tcg_gen_mov_tl(cpu_reserve, t0);
3105 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3110 static void gen_stdcx_(DisasContext *ctx)
3113 gen_set_access_type(ctx, ACCESS_RES);
3114 t0 = tcg_temp_local_new();
3115 gen_addr_reg_index(ctx, t0);
3116 gen_check_align(ctx, t0, 0x07);
3117 #if defined(CONFIG_USER_ONLY)
3118 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3122 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3123 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3124 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3125 l1 = gen_new_label();
3126 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3127 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3128 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3130 tcg_gen_movi_tl(cpu_reserve, -1);
3135 #endif /* defined(TARGET_PPC64) */
3138 static void gen_sync(DisasContext *ctx)
3143 static void gen_wait(DisasContext *ctx)
3145 TCGv_i32 t0 = tcg_temp_new_i32();
3146 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3147 tcg_temp_free_i32(t0);
3148 /* Stop translation, as the CPU is supposed to sleep from now on */
3149 gen_exception_err(ctx, EXCP_HLT, 1);
3152 /*** Floating-point load ***/
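/* The floating-point load/store macros mirror the integer ones, but first
 * check ctx->fpu_enabled and raise an FP-unavailable exception when the FPU
 * is disabled.  Single-precision forms convert to/from the internal
 * double-precision representation through helpers.
 */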
3153 #define GEN_LDF(name, ldop, opc, type) \
3154 static void glue(gen_, name)(DisasContext *ctx) \
3157 if (unlikely(!ctx->fpu_enabled)) { \
3158 gen_exception(ctx, POWERPC_EXCP_FPU); \
3161 gen_set_access_type(ctx, ACCESS_FLOAT); \
3162 EA = tcg_temp_new(); \
3163 gen_addr_imm_index(ctx, EA, 0); \
3164 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3165 tcg_temp_free(EA); \
3168 #define GEN_LDUF(name, ldop, opc, type) \
3169 static void glue(gen_, name##u)(DisasContext *ctx) \
3172 if (unlikely(!ctx->fpu_enabled)) { \
3173 gen_exception(ctx, POWERPC_EXCP_FPU); \
3176 if (unlikely(rA(ctx->opcode) == 0)) { \
3177 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3180 gen_set_access_type(ctx, ACCESS_FLOAT); \
3181 EA = tcg_temp_new(); \
3182 gen_addr_imm_index(ctx, EA, 0); \
3183 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3184 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3185 tcg_temp_free(EA); \
3188 #define GEN_LDUXF(name, ldop, opc, type) \
3189 static void glue(gen_, name##ux)(DisasContext *ctx) \
3192 if (unlikely(!ctx->fpu_enabled)) { \
3193 gen_exception(ctx, POWERPC_EXCP_FPU); \
3196 if (unlikely(rA(ctx->opcode) == 0)) { \
3197 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3200 gen_set_access_type(ctx, ACCESS_FLOAT); \
3201 EA = tcg_temp_new(); \
3202 gen_addr_reg_index(ctx, EA); \
3203 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3204 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3205 tcg_temp_free(EA); \
3208 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3209 static void glue(gen_, name##x)(DisasContext *ctx) \
3212 if (unlikely(!ctx->fpu_enabled)) { \
3213 gen_exception(ctx, POWERPC_EXCP_FPU); \
3216 gen_set_access_type(ctx, ACCESS_FLOAT); \
3217 EA = tcg_temp_new(); \
3218 gen_addr_reg_index(ctx, EA); \
3219 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3220 tcg_temp_free(EA); \
3223 #define GEN_LDFS(name, ldop, op, type) \
3224 GEN_LDF(name, ldop, op | 0x20, type); \
3225 GEN_LDUF(name, ldop, op | 0x21, type); \
3226 GEN_LDUXF(name, ldop, op | 0x01, type); \
3227 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3229 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3231 TCGv t0 = tcg_temp_new();
3232 TCGv_i32 t1 = tcg_temp_new_i32();
3233 gen_qemu_ld32u(ctx, t0, arg2);
3234 tcg_gen_trunc_tl_i32(t1, t0);
3236 gen_helper_float32_to_float64(arg1, t1);
3237 tcg_temp_free_i32(t1);
3240 /* lfd lfdu lfdux lfdx */
3241 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3242 /* lfs lfsu lfsux lfsx */
3243 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3245 /*** Floating-point store ***/
3246 #define GEN_STF(name, stop, opc, type) \
3247 static void glue(gen_, name)(DisasContext *ctx) \
3250 if (unlikely(!ctx->fpu_enabled)) { \
3251 gen_exception(ctx, POWERPC_EXCP_FPU); \
3254 gen_set_access_type(ctx, ACCESS_FLOAT); \
3255 EA = tcg_temp_new(); \
3256 gen_addr_imm_index(ctx, EA, 0); \
3257 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3258 tcg_temp_free(EA); \
3261 #define GEN_STUF(name, stop, opc, type) \
3262 static void glue(gen_, name##u)(DisasContext *ctx) \
3265 if (unlikely(!ctx->fpu_enabled)) { \
3266 gen_exception(ctx, POWERPC_EXCP_FPU); \
3269 if (unlikely(rA(ctx->opcode) == 0)) { \
3270 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3273 gen_set_access_type(ctx, ACCESS_FLOAT); \
3274 EA = tcg_temp_new(); \
3275 gen_addr_imm_index(ctx, EA, 0); \
3276 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3277 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3278 tcg_temp_free(EA); \
3281 #define GEN_STUXF(name, stop, opc, type) \
3282 static void glue(gen_, name##ux)(DisasContext *ctx) \
3285 if (unlikely(!ctx->fpu_enabled)) { \
3286 gen_exception(ctx, POWERPC_EXCP_FPU); \
3289 if (unlikely(rA(ctx->opcode) == 0)) { \
3290 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3293 gen_set_access_type(ctx, ACCESS_FLOAT); \
3294 EA = tcg_temp_new(); \
3295 gen_addr_reg_index(ctx, EA); \
3296 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3297 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3298 tcg_temp_free(EA); \
3301 #define GEN_STXF(name, stop, opc2, opc3, type) \
3302 static void glue(gen_, name##x)(DisasContext *ctx) \
3305 if (unlikely(!ctx->fpu_enabled)) { \
3306 gen_exception(ctx, POWERPC_EXCP_FPU); \
3309 gen_set_access_type(ctx, ACCESS_FLOAT); \
3310 EA = tcg_temp_new(); \
3311 gen_addr_reg_index(ctx, EA); \
3312 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3313 tcg_temp_free(EA); \
3316 #define GEN_STFS(name, stop, op, type) \
3317 GEN_STF(name, stop, op | 0x20, type); \
3318 GEN_STUF(name, stop, op | 0x21, type); \
3319 GEN_STUXF(name, stop, op | 0x01, type); \
3320 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3322 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3324 TCGv_i32 t0 = tcg_temp_new_i32();
3325 TCGv t1 = tcg_temp_new();
3326 gen_helper_float64_to_float32(t0, arg1);
3327 tcg_gen_extu_i32_tl(t1, t0);
3328 tcg_temp_free_i32(t0);
3329 gen_qemu_st32(ctx, t1, arg2);
3333 /* stfd stfdu stfdux stfdx */
3334 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3335 /* stfs stfsu stfsux stfsx */
3336 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3339 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3341 TCGv t0 = tcg_temp_new();
3342 tcg_gen_trunc_i64_tl(t0, arg1);
3343 gen_qemu_st32(ctx, t0, arg2);
3347 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
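/* Direct branches whose target lies in the same guest page as the current
 * translation block are chained to the next TB (exit_tb is given the TB
 * pointer plus the branch slot index n), provided single-stepping is off.
 * Otherwise NIP is updated and control returns to the main loop, raising
 * trace or debug exceptions as required by the single-step flags.
 */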
3350 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3352 TranslationBlock *tb;
3354 #if defined(TARGET_PPC64)
3356 dest = (uint32_t) dest;
3358 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3359 likely(!ctx->singlestep_enabled)) {
3361 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3362 tcg_gen_exit_tb((tcg_target_long)tb + n);
3364 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3365 if (unlikely(ctx->singlestep_enabled)) {
3366 if ((ctx->singlestep_enabled &
3367 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3368 ctx->exception == POWERPC_EXCP_BRANCH) {
3369 target_ulong tmp = ctx->nip;
3371 gen_exception(ctx, POWERPC_EXCP_TRACE);
3374 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3375 gen_debug_exception(ctx);
3382 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3384 #if defined(TARGET_PPC64)
3385 if (ctx->sf_mode == 0)
3386 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3389 tcg_gen_movi_tl(cpu_lr, nip);
3393 static void gen_b(DisasContext *ctx)
3395 target_ulong li, target;
3397 ctx->exception = POWERPC_EXCP_BRANCH;
3398 /* sign extend LI */
3399 #if defined(TARGET_PPC64)
3401 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3404 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3405 if (likely(AA(ctx->opcode) == 0))
3406 target = ctx->nip + li - 4;
3409 if (LK(ctx->opcode))
3410 gen_setlr(ctx, ctx->nip);
3411 gen_goto_tb(ctx, 0, target);
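/* Conditional branches: when BO bit 0x4 is clear, CTR is decremented and
 * tested for zero or non-zero; when BO bit 0x10 is clear, the CR bit selected
 * by BI is also tested (for either sense).  The target comes from the
 * immediate displacement (BCOND_IM), from LR (BCOND_LR) or from CTR
 * (BCOND_CTR).
 */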
3418 static inline void gen_bcond(DisasContext *ctx, int type)
3420 uint32_t bo = BO(ctx->opcode);
3424 ctx->exception = POWERPC_EXCP_BRANCH;
3425 if (type == BCOND_LR || type == BCOND_CTR) {
3426 target = tcg_temp_local_new();
3427 if (type == BCOND_CTR)
3428 tcg_gen_mov_tl(target, cpu_ctr);
3430 tcg_gen_mov_tl(target, cpu_lr);
3432 TCGV_UNUSED(target);
3434 if (LK(ctx->opcode))
3435 gen_setlr(ctx, ctx->nip);
3436 l1 = gen_new_label();
3437 if ((bo & 0x4) == 0) {
3438 /* Decrement and test CTR */
3439 TCGv temp = tcg_temp_new();
3440 if (unlikely(type == BCOND_CTR)) {
3441 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3444 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3445 #if defined(TARGET_PPC64)
3447 tcg_gen_ext32u_tl(temp, cpu_ctr);
3450 tcg_gen_mov_tl(temp, cpu_ctr);
3452 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3454 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3456 tcg_temp_free(temp);
3458 if ((bo & 0x10) == 0) {
3460 uint32_t bi = BI(ctx->opcode);
3461 uint32_t mask = 1 << (3 - (bi & 0x03));
3462 TCGv_i32 temp = tcg_temp_new_i32();
3465 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3466 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3468 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3469 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3471 tcg_temp_free_i32(temp);
3473 if (type == BCOND_IM) {
3474 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3475 if (likely(AA(ctx->opcode) == 0)) {
3476 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3478 gen_goto_tb(ctx, 0, li);
3481 gen_goto_tb(ctx, 1, ctx->nip);
3483 #if defined(TARGET_PPC64)
3484 if (!(ctx->sf_mode))
3485 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3488 tcg_gen_andi_tl(cpu_nip, target, ~3);
3491 #if defined(TARGET_PPC64)
3492 if (!(ctx->sf_mode))
3493 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3496 tcg_gen_movi_tl(cpu_nip, ctx->nip);
3501 static void gen_bc(DisasContext *ctx)
3503 gen_bcond(ctx, BCOND_IM);
3506 static void gen_bcctr(DisasContext *ctx)
3508 gen_bcond(ctx, BCOND_CTR);
3511 static void gen_bclr(DisasContext *ctx)
3513 gen_bcond(ctx, BCOND_LR);
3516 /*** Condition register logical ***/
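/* Each CR field is kept as a separate 4-bit value in cpu_crf[].  The CR
 * logical operations therefore shift the source bits so that the operand bit
 * lines up with the destination bit position, apply the operation, and merge
 * only the destination bit back into its field.
 */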
3517 #define GEN_CRLOGIC(name, tcg_op, opc) \
3518 static void glue(gen_, name)(DisasContext *ctx) \
3523 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3524 t0 = tcg_temp_new_i32(); \
3526 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3528 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3530 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3531 t1 = tcg_temp_new_i32(); \
3532 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3534 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3536 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3538 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3539 tcg_op(t0, t0, t1); \
3540 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3541 tcg_gen_andi_i32(t0, t0, bitmask); \
3542 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3543 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3544 tcg_temp_free_i32(t0); \
3545 tcg_temp_free_i32(t1); \
3549 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3551 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3553 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3555 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3557 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3559 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3561 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3563 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3566 static void gen_mcrf(DisasContext *ctx)
3568 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3571 /*** System linkage ***/
3573 /* rfi (mem_idx only) */
3574 static void gen_rfi(DisasContext *ctx)
3576 #if defined(CONFIG_USER_ONLY)
3577 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3579 /* Restore CPU state */
3580 if (unlikely(!ctx->mem_idx)) {
3581 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3585 gen_sync_exception(ctx);
3589 #if defined(TARGET_PPC64)
3590 static void gen_rfid(DisasContext *ctx)
3592 #if defined(CONFIG_USER_ONLY)
3593 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3595 /* Restore CPU state */
3596 if (unlikely(!ctx->mem_idx)) {
3597 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3601 gen_sync_exception(ctx);
3605 static void gen_hrfid(DisasContext *ctx)
3607 #if defined(CONFIG_USER_ONLY)
3608 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3610 /* Restore CPU state */
3611 if (unlikely(ctx->mem_idx <= 1)) {
3612 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3616 gen_sync_exception(ctx);
3622 #if defined(CONFIG_USER_ONLY)
3623 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3625 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3627 static void gen_sc(DisasContext *ctx)
3631 lev = (ctx->opcode >> 5) & 0x7F;
3632 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3638 static void gen_tw(DisasContext *ctx)
3640 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3641 /* Update the nip since this might generate a trap exception */
3642 gen_update_nip(ctx, ctx->nip);
3643 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3644 tcg_temp_free_i32(t0);
3648 static void gen_twi(DisasContext *ctx)
3650 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3651 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3652 /* Update the nip since this might generate a trap exception */
3653 gen_update_nip(ctx, ctx->nip);
3654 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3656 tcg_temp_free_i32(t1);
3659 #if defined(TARGET_PPC64)
3661 static void gen_td(DisasContext *ctx)
3663 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3664 /* Update the nip since this might generate a trap exception */
3665 gen_update_nip(ctx, ctx->nip);
3666 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3667 tcg_temp_free_i32(t0);
3671 static void gen_tdi(DisasContext *ctx)
3673 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3674 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3675 /* Update the nip since this might generate a trap exception */
3676 gen_update_nip(ctx, ctx->nip);
3677 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3679 tcg_temp_free_i32(t1);
3683 /*** Processor control ***/
3686 static void gen_mcrxr(DisasContext *ctx)
3688 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3689 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3690 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3694 static void gen_mfcr(DisasContext *ctx)
3698 if (likely(ctx->opcode & 0x00100000)) {
3699 crm = CRM(ctx->opcode);
3700 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3702 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3703 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3704 cpu_gpr[rD(ctx->opcode)], crn * 4);
3707 TCGv_i32 t0 = tcg_temp_new_i32();
3708 tcg_gen_mov_i32(t0, cpu_crf[0]);
3709 tcg_gen_shli_i32(t0, t0, 4);
3710 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3711 tcg_gen_shli_i32(t0, t0, 4);
3712 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3713 tcg_gen_shli_i32(t0, t0, 4);
3714 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3715 tcg_gen_shli_i32(t0, t0, 4);
3716 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3717 tcg_gen_shli_i32(t0, t0, 4);
3718 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3719 tcg_gen_shli_i32(t0, t0, 4);
3720 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3721 tcg_gen_shli_i32(t0, t0, 4);
3722 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3723 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3724 tcg_temp_free_i32(t0);
3729 static void gen_mfmsr(DisasContext *ctx)
3731 #if defined(CONFIG_USER_ONLY)
3732 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3734 if (unlikely(!ctx->mem_idx)) {
3735 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3738 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3742 static void spr_noaccess(void *opaque, int gprn, int sprn)
3745 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3746 printf("ERROR: trying to access SPR %d!\n", sprn);
3749 #define SPR_NOACCESS (&spr_noaccess)
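/* SPR accesses are dispatched through per-SPR callback tables selected by
 * privilege level: uea_* for user mode, oea_* for supervisor mode and hea_*
 * for hypervisor mode (mem_idx == 2).  A NULL callback means the SPR does not
 * exist; SPR_NOACCESS marks a privileged SPR.
 */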
3752 static inline void gen_op_mfspr(DisasContext *ctx)
3754 void (*read_cb)(void *opaque, int gprn, int sprn);
3755 uint32_t sprn = SPR(ctx->opcode);
3757 #if !defined(CONFIG_USER_ONLY)
3758 if (ctx->mem_idx == 2)
3759 read_cb = ctx->spr_cb[sprn].hea_read;
3760 else if (ctx->mem_idx)
3761 read_cb = ctx->spr_cb[sprn].oea_read;
3764 read_cb = ctx->spr_cb[sprn].uea_read;
3765 if (likely(read_cb != NULL)) {
3766 if (likely(read_cb != SPR_NOACCESS)) {
3767 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3769 /* Privilege exception */
3770 /* This is a hack to avoid warnings when running Linux:
3771 * this OS breaks the PowerPC virtualisation model,
3772 * allowing userland applications to read the PVR
3774 if (sprn != SPR_PVR) {
3775 qemu_log("Trying to read privileged spr %d %03x at "
3776 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3777 printf("Trying to read privileged spr %d %03x at "
3778 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3780 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3784 qemu_log("Trying to read invalid spr %d %03x at "
3785 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3786 printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
3787 sprn, sprn, ctx->nip);
3788 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3792 static void gen_mfspr(DisasContext *ctx)
3798 static void gen_mftb(DisasContext *ctx)
3804 static void gen_mtcrf(DisasContext *ctx)
3808 crm = CRM(ctx->opcode);
3809 if (likely((ctx->opcode & 0x00100000))) {
3810 if (crm && ((crm & (crm - 1)) == 0)) {
3811 TCGv_i32 temp = tcg_temp_new_i32();
3813 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3814 tcg_gen_shri_i32(temp, temp, crn * 4);
3815 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3816 tcg_temp_free_i32(temp);
3819 TCGv_i32 temp = tcg_temp_new_i32();
3820 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3821 for (crn = 0 ; crn < 8 ; crn++) {
3822 if (crm & (1 << crn)) {
3823 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3824 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3827 tcg_temp_free_i32(temp);
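/* mtmsrd/mtmsr: when opcode bit 0x00010000 is set, only MSR[EE] and MSR[RI]
 * are updated and translation can continue.  Otherwise the full MSR is
 * written through a helper and translation is stopped, since the machine
 * state may have changed.
 */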
3832 #if defined(TARGET_PPC64)
3833 static void gen_mtmsrd(DisasContext *ctx)
3835 #if defined(CONFIG_USER_ONLY)
3836 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3838 if (unlikely(!ctx->mem_idx)) {
3839 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3842 if (ctx->opcode & 0x00010000) {
3843 /* Special form that does not need any synchronisation */
3844 TCGv t0 = tcg_temp_new();
3845 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3846 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3847 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3850 /* XXX: we need to update the NIP before the store:
3851 * if we enter power-saving mode, we will exit the execution loop
3852 * directly from ppc_store_msr
3854 gen_update_nip(ctx, ctx->nip);
3855 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3856 /* Must stop the translation, as the machine state may have changed */
3857 /* Note that mtmsr is not always defined as context-synchronizing */
3858 gen_stop_exception(ctx);
3864 static void gen_mtmsr(DisasContext *ctx)
3866 #if defined(CONFIG_USER_ONLY)
3867 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3869 if (unlikely(!ctx->mem_idx)) {
3870 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3873 if (ctx->opcode & 0x00010000) {
3874 /* Special form that does not need any synchronisation */
3875 TCGv t0 = tcg_temp_new();
3876 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3877 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3878 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3881 TCGv msr = tcg_temp_new();
3883 /* XXX: we need to update the NIP before the store:
3884 * if we enter power-saving mode, we will exit the execution loop
3885 * directly from ppc_store_msr
3887 gen_update_nip(ctx, ctx->nip);
3888 #if defined(TARGET_PPC64)
3889 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
3891 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
3893 gen_helper_store_msr(msr);
3894 /* Must stop the translation, as the machine state may have changed */
3895 /* Note that mtmsr is not always defined as context-synchronizing */
3896 gen_stop_exception(ctx);
3902 static void gen_mtspr(DisasContext *ctx)
3904 void (*write_cb)(void *opaque, int sprn, int gprn);
3905 uint32_t sprn = SPR(ctx->opcode);
3907 #if !defined(CONFIG_USER_ONLY)
3908 if (ctx->mem_idx == 2)
3909 write_cb = ctx->spr_cb[sprn].hea_write;
3910 else if (ctx->mem_idx)
3911 write_cb = ctx->spr_cb[sprn].oea_write;
3914 write_cb = ctx->spr_cb[sprn].uea_write;
3915 if (likely(write_cb != NULL)) {
3916 if (likely(write_cb != SPR_NOACCESS)) {
3917 (*write_cb)(ctx, sprn, rS(ctx->opcode));
3919 /* Privilege exception */
3920 qemu_log("Trying to write privileged spr %d %03x at "
3921 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3922 printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
3923 "\n", sprn, sprn, ctx->nip);
3924 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3928 qemu_log("Trying to write invalid spr %d %03x at "
3929 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3930 printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
3931 sprn, sprn, ctx->nip);
3932 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3936 /*** Cache management ***/
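/* dcbf and dcbst are modelled as a dummy byte load so that the MMU still
 * performs the access checks; dcbz and icbi go through helpers since they
 * affect a whole cache line; dcbt, dcbtst, dst, dstst, dss and dcba are
 * treated as no-ops.
 */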
3939 static void gen_dcbf(DisasContext *ctx)
3941 /* XXX: specification says this is treated as a load by the MMU */
3943 gen_set_access_type(ctx, ACCESS_CACHE);
3944 t0 = tcg_temp_new();
3945 gen_addr_reg_index(ctx, t0);
3946 gen_qemu_ld8u(ctx, t0, t0);
3950 /* dcbi (Supervisor only) */
3951 static void gen_dcbi(DisasContext *ctx)
3953 #if defined(CONFIG_USER_ONLY)
3954 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3957 if (unlikely(!ctx->mem_idx)) {
3958 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3961 EA = tcg_temp_new();
3962 gen_set_access_type(ctx, ACCESS_CACHE);
3963 gen_addr_reg_index(ctx, EA);
3964 val = tcg_temp_new();
3965 /* XXX: specification says this should be treated as a store by the MMU */
3966 gen_qemu_ld8u(ctx, val, EA);
3967 gen_qemu_st8(ctx, val, EA);
3974 static void gen_dcbst(DisasContext *ctx)
3976 /* XXX: specification says this is treated as a load by the MMU */
3978 gen_set_access_type(ctx, ACCESS_CACHE);
3979 t0 = tcg_temp_new();
3980 gen_addr_reg_index(ctx, t0);
3981 gen_qemu_ld8u(ctx, t0, t0);
3986 static void gen_dcbt(DisasContext *ctx)
3988 /* interpreted as no-op */
3989 /* XXX: specification says this is treated as a load by the MMU
3990 * but does not generate any exception
3995 static void gen_dcbtst(DisasContext *ctx)
3997 /* interpreted as no-op */
3998 /* XXX: specification says this is treated as a load by the MMU
3999 * but does not generate any exception
4004 static void gen_dcbz(DisasContext *ctx)
4007 gen_set_access_type(ctx, ACCESS_CACHE);
4008 /* NIP cannot be restored if the memory exception comes from a helper */
4009 gen_update_nip(ctx, ctx->nip - 4);
4010 t0 = tcg_temp_new();
4011 gen_addr_reg_index(ctx, t0);
4012 gen_helper_dcbz(t0);
4016 static void gen_dcbz_970(DisasContext *ctx)
4019 gen_set_access_type(ctx, ACCESS_CACHE);
4020 /* NIP cannot be restored if the memory exception comes from a helper */
4021 gen_update_nip(ctx, ctx->nip - 4);
4022 t0 = tcg_temp_new();
4023 gen_addr_reg_index(ctx, t0);
4024 if (ctx->opcode & 0x00200000)
4025 gen_helper_dcbz(t0);
4027 gen_helper_dcbz_970(t0);
4032 static void gen_dst(DisasContext *ctx)
4034 if (rA(ctx->opcode) == 0) {
4035 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4037 /* interpreted as no-op */
4042 static void gen_dstst(DisasContext *ctx)
4044 if (rA(ctx->opcode) == 0) {
4045 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4047 /* interpreted as no-op */
4053 static void gen_dss(DisasContext *ctx)
4055 /* interpreted as no-op */
4059 static void gen_icbi(DisasContext *ctx)
4062 gen_set_access_type(ctx, ACCESS_CACHE);
4063 /* NIP cannot be restored if the memory exception comes from a helper */
4064 gen_update_nip(ctx, ctx->nip - 4);
4065 t0 = tcg_temp_new();
4066 gen_addr_reg_index(ctx, t0);
4067 gen_helper_icbi(t0);
4073 static void gen_dcba(DisasContext *ctx)
4075 /* interpreted as no-op */
4076 /* XXX: specification says this is treated as a store by the MMU
4077 * but does not generate any exception
4081 /*** Segment register manipulation ***/
4082 /* Supervisor only: */
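/* mfsr/mtsr take the segment register number from the SR field of the
 * opcode, while mfsrin/mtsrin extract it from bits 28..31 of rB.  The _64b
 * variants implement the PowerPC 64 "bridge" facility, where segment
 * registers are emulated on top of the SLB.
 */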
4085 static void gen_mfsr(DisasContext *ctx)
4087 #if defined(CONFIG_USER_ONLY)
4088 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4091 if (unlikely(!ctx->mem_idx)) {
4092 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4095 t0 = tcg_const_tl(SR(ctx->opcode));
4096 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4102 static void gen_mfsrin(DisasContext *ctx)
4104 #if defined(CONFIG_USER_ONLY)
4105 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4108 if (unlikely(!ctx->mem_idx)) {
4109 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4112 t0 = tcg_temp_new();
4113 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4114 tcg_gen_andi_tl(t0, t0, 0xF);
4115 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4121 static void gen_mtsr(DisasContext *ctx)
4123 #if defined(CONFIG_USER_ONLY)
4124 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4127 if (unlikely(!ctx->mem_idx)) {
4128 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4131 t0 = tcg_const_tl(SR(ctx->opcode));
4132 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4138 static void gen_mtsrin(DisasContext *ctx)
4140 #if defined(CONFIG_USER_ONLY)
4141 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4144 if (unlikely(!ctx->mem_idx)) {
4145 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4148 t0 = tcg_temp_new();
4149 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4150 tcg_gen_andi_tl(t0, t0, 0xF);
4151 gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
4156 #if defined(TARGET_PPC64)
4157 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4160 static void gen_mfsr_64b(DisasContext *ctx)
4162 #if defined(CONFIG_USER_ONLY)
4163 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4166 if (unlikely(!ctx->mem_idx)) {
4167 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4170 t0 = tcg_const_tl(SR(ctx->opcode));
4171 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4177 static void gen_mfsrin_64b(DisasContext *ctx)
4179 #if defined(CONFIG_USER_ONLY)
4180 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4183 if (unlikely(!ctx->mem_idx)) {
4184 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4187 t0 = tcg_temp_new();
4188 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4189 tcg_gen_andi_tl(t0, t0, 0xF);
4190 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4196 static void gen_mtsr_64b(DisasContext *ctx)
4198 #if defined(CONFIG_USER_ONLY)
4199 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4202 if (unlikely(!ctx->mem_idx)) {
4203 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4206 t0 = tcg_const_tl(SR(ctx->opcode));
4207 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4213 static void gen_mtsrin_64b(DisasContext *ctx)
4215 #if defined(CONFIG_USER_ONLY)
4216 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4219 if (unlikely(!ctx->mem_idx)) {
4220 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4223 t0 = tcg_temp_new();
4224 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4225 tcg_gen_andi_tl(t0, t0, 0xF);
4226 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4232 static void gen_slbmte(DisasContext *ctx)
4234 #if defined(CONFIG_USER_ONLY)
4235 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4237 if (unlikely(!ctx->mem_idx)) {
4238 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4241 gen_helper_store_slb(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
4245 static void gen_slbmfee(DisasContext *ctx)
4247 #if defined(CONFIG_USER_ONLY)
4248 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4250 if (unlikely(!ctx->mem_idx)) {
4251 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4254 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)],
4255 cpu_gpr[rB(ctx->opcode)]);
4259 static void gen_slbmfev(DisasContext *ctx)
4261 #if defined(CONFIG_USER_ONLY)
4262 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4264 if (unlikely(!ctx->mem_idx)) {
4265 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4268 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)],
4269 cpu_gpr[rB(ctx->opcode)]);
4272 #endif /* defined(TARGET_PPC64) */
4274 /*** Lookaside buffer management ***/
4275 /* Optional & mem_idx only: */
4278 static void gen_tlbia(DisasContext *ctx)
4280 #if defined(CONFIG_USER_ONLY)
4281 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4283 if (unlikely(!ctx->mem_idx)) {
4284 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4292 static void gen_tlbiel(DisasContext *ctx)
4294 #if defined(CONFIG_USER_ONLY)
4295 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4297 if (unlikely(!ctx->mem_idx)) {
4298 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4301 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4306 static void gen_tlbie(DisasContext *ctx)
4308 #if defined(CONFIG_USER_ONLY)
4309 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4311 if (unlikely(!ctx->mem_idx)) {
4312 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4315 #if defined(TARGET_PPC64)
4316 if (!ctx->sf_mode) {
4317 TCGv t0 = tcg_temp_new();
4318 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4319 gen_helper_tlbie(t0);
4323 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4328 static void gen_tlbsync(DisasContext *ctx)
4330 #if defined(CONFIG_USER_ONLY)
4331 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4333 if (unlikely(!ctx->mem_idx)) {
4334 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4337 /* This has no effect: it should ensure that all previous
4338 * tlbie operations have completed
4340 gen_stop_exception(ctx);
4344 #if defined(TARGET_PPC64)
4346 static void gen_slbia(DisasContext *ctx)
4348 #if defined(CONFIG_USER_ONLY)
4349 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4351 if (unlikely(!ctx->mem_idx)) {
4352 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4360 static void gen_slbie(DisasContext *ctx)
4362 #if defined(CONFIG_USER_ONLY)
4363 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4365 if (unlikely(!ctx->mem_idx)) {
4366 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4369 gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
4374 /*** External control ***/
4378 static void gen_eciwx(DisasContext *ctx)
4381 /* Should check EAR[E] ! */
4382 gen_set_access_type(ctx, ACCESS_EXT);
4383 t0 = tcg_temp_new();
4384 gen_addr_reg_index(ctx, t0);
4385 gen_check_align(ctx, t0, 0x03);
4386 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4391 static void gen_ecowx(DisasContext *ctx)
4394 /* Should check EAR[E] ! */
4395 gen_set_access_type(ctx, ACCESS_EXT);
4396 t0 = tcg_temp_new();
4397 gen_addr_reg_index(ctx, t0);
4398 gen_check_align(ctx, t0, 0x03);
4399 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4403 /* PowerPC 601 specific instructions */
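/* These POWER-era instructions implemented by the PowerPC 601 (abs, doz,
 * div, mul, ...) operate on 32-bit quantities; several of them use the MQ
 * SPR as an implicit operand or result, and the overflow variants set
 * XER[OV]/XER[SO] explicitly.
 */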
4406 static void gen_abs(DisasContext *ctx)
4408 int l1 = gen_new_label();
4409 int l2 = gen_new_label();
4410 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4411 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4414 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4416 if (unlikely(Rc(ctx->opcode) != 0))
4417 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4421 static void gen_abso(DisasContext *ctx)
4423 int l1 = gen_new_label();
4424 int l2 = gen_new_label();
4425 int l3 = gen_new_label();
4426 /* Start with XER OV disabled, the most likely case */
4427 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4428 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4429 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4430 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4433 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4436 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4438 if (unlikely(Rc(ctx->opcode) != 0))
4439 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4443 static void gen_clcs(DisasContext *ctx)
4445 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4446 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4447 tcg_temp_free_i32(t0);
4448 /* Rc=1 sets CR0 to an undefined state */
4452 static void gen_div(DisasContext *ctx)
4454 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4455 if (unlikely(Rc(ctx->opcode) != 0))
4456 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4460 static void gen_divo(DisasContext *ctx)
4462 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4463 if (unlikely(Rc(ctx->opcode) != 0))
4464 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4468 static void gen_divs(DisasContext *ctx)
4470 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4471 if (unlikely(Rc(ctx->opcode) != 0))
4472 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4475 /* divso - divso. */
4476 static void gen_divso(DisasContext *ctx)
4478 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4479 if (unlikely(Rc(ctx->opcode) != 0))
4480 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4484 static void gen_doz(DisasContext *ctx)
4486 int l1 = gen_new_label();
4487 int l2 = gen_new_label();
4488 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4489 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4492 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4494 if (unlikely(Rc(ctx->opcode) != 0))
4495 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4499 static void gen_dozo(DisasContext *ctx)
4501 int l1 = gen_new_label();
4502 int l2 = gen_new_label();
4503 TCGv t0 = tcg_temp_new();
4504 TCGv t1 = tcg_temp_new();
4505 TCGv t2 = tcg_temp_new();
4506 /* Start with XER OV disabled, the most likely case */
4507 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4508 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4509 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4510 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4511 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4512 tcg_gen_andc_tl(t1, t1, t2);
4513 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4514 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4515 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4518 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4523 if (unlikely(Rc(ctx->opcode) != 0))
4524 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4528 static void gen_dozi(DisasContext *ctx)
4530 target_long simm = SIMM(ctx->opcode);
4531 int l1 = gen_new_label();
4532 int l2 = gen_new_label();
4533 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4534 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4537 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4539 if (unlikely(Rc(ctx->opcode) != 0))
4540 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4543 /* lscbx - lscbx. */
4544 static void gen_lscbx(DisasContext *ctx)
4546 TCGv t0 = tcg_temp_new();
4547 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4548 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4549 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4551 gen_addr_reg_index(ctx, t0);
4552 /* NIP cannot be restored if the memory exception comes from a helper */
4553 gen_update_nip(ctx, ctx->nip - 4);
4554 gen_helper_lscbx(t0, t0, t1, t2, t3);
4555 tcg_temp_free_i32(t1);
4556 tcg_temp_free_i32(t2);
4557 tcg_temp_free_i32(t3);
4558 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4559 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4560 if (unlikely(Rc(ctx->opcode) != 0))
4561 gen_set_Rc0(ctx, t0);
4565 /* maskg - maskg. */
4566 static void gen_maskg(DisasContext *ctx)
4568 int l1 = gen_new_label();
4569 TCGv t0 = tcg_temp_new();
4570 TCGv t1 = tcg_temp_new();
4571 TCGv t2 = tcg_temp_new();
4572 TCGv t3 = tcg_temp_new();
4573 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4574 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4575 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4576 tcg_gen_addi_tl(t2, t0, 1);
4577 tcg_gen_shr_tl(t2, t3, t2);
4578 tcg_gen_shr_tl(t3, t3, t1);
4579 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4580 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4581 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4587 if (unlikely(Rc(ctx->opcode) != 0))
4588 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4591 /* maskir - maskir. */
4592 static void gen_maskir(DisasContext *ctx)
4594 TCGv t0 = tcg_temp_new();
4595 TCGv t1 = tcg_temp_new();
4596 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4597 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4598 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4601 if (unlikely(Rc(ctx->opcode) != 0))
4602 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4606 static void gen_mul(DisasContext *ctx)
4608 TCGv_i64 t0 = tcg_temp_new_i64();
4609 TCGv_i64 t1 = tcg_temp_new_i64();
4610 TCGv t2 = tcg_temp_new();
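/* 32 x 32 -> 64-bit product: the low half goes to MQ, the high half to rD */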
4611 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4612 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4613 tcg_gen_mul_i64(t0, t0, t1);
4614 tcg_gen_trunc_i64_tl(t2, t0);
4615 gen_store_spr(SPR_MQ, t2);
4616 tcg_gen_shri_i64(t1, t0, 32);
4617 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4618 tcg_temp_free_i64(t0);
4619 tcg_temp_free_i64(t1);
4621 if (unlikely(Rc(ctx->opcode) != 0))
4622 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4626 static void gen_mulo(DisasContext *ctx)
4628 int l1 = gen_new_label();
4629 TCGv_i64 t0 = tcg_temp_new_i64();
4630 TCGv_i64 t1 = tcg_temp_new_i64();
4631 TCGv t2 = tcg_temp_new();
4632 /* Start with XER OV disabled, the most likely case */
4633 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4634 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4635 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4636 tcg_gen_mul_i64(t0, t0, t1);
4637 tcg_gen_trunc_i64_tl(t2, t0);
4638 gen_store_spr(SPR_MQ, t2);
4639 tcg_gen_shri_i64(t1, t0, 32);
4640 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
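/* OV is set when the 64-bit product does not fit in 32 signed bits */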
4641 tcg_gen_ext32s_i64(t1, t0);
4642 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4643 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4645 tcg_temp_free_i64(t0);
4646 tcg_temp_free_i64(t1);
4648 if (unlikely(Rc(ctx->opcode) != 0))
4649 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4653 static void gen_nabs(DisasContext *ctx)
4655 int l1 = gen_new_label();
4656 int l2 = gen_new_label();
4657 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4658 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4661 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4663 if (unlikely(Rc(ctx->opcode) != 0))
4664 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4667 /* nabso - nabso. */
4668 static void gen_nabso(DisasContext *ctx)
4670 int l1 = gen_new_label();
4671 int l2 = gen_new_label();
4672 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4673 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4676 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4678 /* nabs never overflows */
4679 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4680 if (unlikely(Rc(ctx->opcode) != 0))
4681 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4685 static void gen_rlmi(DisasContext *ctx)
4687 uint32_t mb = MB(ctx->opcode);
4688 uint32_t me = ME(ctx->opcode);
4689 TCGv t0 = tcg_temp_new();
4690 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4691 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4692 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4693 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4694 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4696 if (unlikely(Rc(ctx->opcode) != 0))
4697 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4701 static void gen_rrib(DisasContext *ctx)
4703 TCGv t0 = tcg_temp_new();
4704 TCGv t1 = tcg_temp_new();
4705 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4706 tcg_gen_movi_tl(t1, 0x80000000);
4707 tcg_gen_shr_tl(t1, t1, t0);
4708 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4709 tcg_gen_and_tl(t0, t0, t1);
4710 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4711 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4714 if (unlikely(Rc(ctx->opcode) != 0))
4715 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4719 static void gen_sle(DisasContext *ctx)
4721 TCGv t0 = tcg_temp_new();
4722 TCGv t1 = tcg_temp_new();
4723 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4724 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4725 tcg_gen_subfi_tl(t1, 32, t1);
4726 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4727 tcg_gen_or_tl(t1, t0, t1);
4728 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4729 gen_store_spr(SPR_MQ, t1);
4732 if (unlikely(Rc(ctx->opcode) != 0))
4733 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4737 static void gen_sleq(DisasContext *ctx)
4739 TCGv t0 = tcg_temp_new();
4740 TCGv t1 = tcg_temp_new();
4741 TCGv t2 = tcg_temp_new();
4742 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4743 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4744 tcg_gen_shl_tl(t2, t2, t0);
4745 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4746 gen_load_spr(t1, SPR_MQ);
4747 gen_store_spr(SPR_MQ, t0);
4748 tcg_gen_and_tl(t0, t0, t2);
4749 tcg_gen_andc_tl(t1, t1, t2);
4750 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4754 if (unlikely(Rc(ctx->opcode) != 0))
4755 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4759 static void gen_sliq(DisasContext *ctx)
4761 int sh = SH(ctx->opcode);
4762 TCGv t0 = tcg_temp_new();
4763 TCGv t1 = tcg_temp_new();
4764 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4765 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4766 tcg_gen_or_tl(t1, t0, t1);
4767 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4768 gen_store_spr(SPR_MQ, t1);
4771 if (unlikely(Rc(ctx->opcode) != 0))
4772 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4775 /* slliq - slliq. */
4776 static void gen_slliq(DisasContext *ctx)
4778 int sh = SH(ctx->opcode);
4779 TCGv t0 = tcg_temp_new();
4780 TCGv t1 = tcg_temp_new();
4781 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4782 gen_load_spr(t1, SPR_MQ);
4783 gen_store_spr(SPR_MQ, t0);
4784 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4785 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4786 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4789 if (unlikely(Rc(ctx->opcode) != 0))
4790 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4794 static void gen_sllq(DisasContext *ctx)
4796 int l1 = gen_new_label();
4797 int l2 = gen_new_label();
4798 TCGv t0 = tcg_temp_local_new();
4799 TCGv t1 = tcg_temp_local_new();
4800 TCGv t2 = tcg_temp_local_new();
4801 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4802 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4803 tcg_gen_shl_tl(t1, t1, t2);
4804 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4805 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4806 gen_load_spr(t0, SPR_MQ);
4807 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4810 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4811 gen_load_spr(t2, SPR_MQ);
4812 tcg_gen_andc_tl(t1, t2, t1);
4813 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4818 if (unlikely(Rc(ctx->opcode) != 0))
4819 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4823 static void gen_slq(DisasContext *ctx)
4825 int l1 = gen_new_label();
4826 TCGv t0 = tcg_temp_new();
4827 TCGv t1 = tcg_temp_new();
4828 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4829 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4830 tcg_gen_subfi_tl(t1, 32, t1);
4831 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4832 tcg_gen_or_tl(t1, t0, t1);
4833 gen_store_spr(SPR_MQ, t1);
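/* A shift count of 32 or more (rB bit 0x20 set) forces the result to 0 */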
4834 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4835 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4836 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4837 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4841 if (unlikely(Rc(ctx->opcode) != 0))
4842 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4845 /* sraiq - sraiq. */
4846 static void gen_sraiq(DisasContext *ctx)
4848 int sh = SH(ctx->opcode);
4849 int l1 = gen_new_label();
4850 TCGv t0 = tcg_temp_new();
4851 TCGv t1 = tcg_temp_new();
4852 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4853 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4854 tcg_gen_or_tl(t0, t0, t1);
4855 gen_store_spr(SPR_MQ, t0);
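/* CA is set when the source is negative and non-zero bits were shifted out */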
4856 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4857 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4858 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4859 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4861 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4864 if (unlikely(Rc(ctx->opcode) != 0))
4865 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4869 static void gen_sraq(DisasContext *ctx)
4871 int l1 = gen_new_label();
4872 int l2 = gen_new_label();
4873 TCGv t0 = tcg_temp_new();
4874 TCGv t1 = tcg_temp_local_new();
4875 TCGv t2 = tcg_temp_local_new();
4876 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4877 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4878 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4879 tcg_gen_subfi_tl(t2, 32, t2);
4880 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4881 tcg_gen_or_tl(t0, t0, t2);
4882 gen_store_spr(SPR_MQ, t0);
4883 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4884 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
4885 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4886 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4889 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4890 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4891 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4892 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4893 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4897 if (unlikely(Rc(ctx->opcode) != 0))
4898 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4902 static void gen_sre(DisasContext *ctx)
4904 TCGv t0 = tcg_temp_new();
4905 TCGv t1 = tcg_temp_new();
4906 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4907 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4908 tcg_gen_subfi_tl(t1, 32, t1);
4909 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4910 tcg_gen_or_tl(t1, t0, t1);
4911 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4912 gen_store_spr(SPR_MQ, t1);
4915 if (unlikely(Rc(ctx->opcode) != 0))
4916 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4920 static void gen_srea(DisasContext *ctx)
4922 TCGv t0 = tcg_temp_new();
4923 TCGv t1 = tcg_temp_new();
4924 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4925 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4926 gen_store_spr(SPR_MQ, t0);
4927 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4930 if (unlikely(Rc(ctx->opcode) != 0))
4931 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4935 static void gen_sreq(DisasContext *ctx)
4937 TCGv t0 = tcg_temp_new();
4938 TCGv t1 = tcg_temp_new();
4939 TCGv t2 = tcg_temp_new();
4940 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4941 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4942 tcg_gen_shr_tl(t1, t1, t0);
4943 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4944 gen_load_spr(t2, SPR_MQ);
4945 gen_store_spr(SPR_MQ, t0);
4946 tcg_gen_and_tl(t0, t0, t1);
4947 tcg_gen_andc_tl(t2, t2, t1);
4948 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
4952 if (unlikely(Rc(ctx->opcode) != 0))
4953 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4957 static void gen_sriq(DisasContext *ctx)
4959 int sh = SH(ctx->opcode);
4960 TCGv t0 = tcg_temp_new();
4961 TCGv t1 = tcg_temp_new();
4962 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4963 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4964 tcg_gen_or_tl(t1, t0, t1);
4965 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4966 gen_store_spr(SPR_MQ, t1);
4969 if (unlikely(Rc(ctx->opcode) != 0))
4970 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4974 static void gen_srliq(DisasContext *ctx)
4976 int sh = SH(ctx->opcode);
4977 TCGv t0 = tcg_temp_new();
4978 TCGv t1 = tcg_temp_new();
4979 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4980 gen_load_spr(t1, SPR_MQ);
4981 gen_store_spr(SPR_MQ, t0);
4982 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
4983 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
4984 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4987 if (unlikely(Rc(ctx->opcode) != 0))
4988 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4992 static void gen_srlq(DisasContext *ctx)
4994 int l1 = gen_new_label();
4995 int l2 = gen_new_label();
4996 TCGv t0 = tcg_temp_local_new();
4997 TCGv t1 = tcg_temp_local_new();
4998 TCGv t2 = tcg_temp_local_new();
4999 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5000 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5001 tcg_gen_shr_tl(t2, t1, t2);
5002 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5003 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5004 gen_load_spr(t0, SPR_MQ);
5005 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5008 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5009 tcg_gen_and_tl(t0, t0, t2);
5010 gen_load_spr(t1, SPR_MQ);
5011 tcg_gen_andc_tl(t1, t1, t2);
5012 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5017 if (unlikely(Rc(ctx->opcode) != 0))
5018 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5022 static void gen_srq(DisasContext *ctx)
5024 int l1 = gen_new_label();
5025 TCGv t0 = tcg_temp_new();
5026 TCGv t1 = tcg_temp_new();
5027 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5028 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5029 tcg_gen_subfi_tl(t1, 32, t1);
5030 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5031 tcg_gen_or_tl(t1, t0, t1);
5032 gen_store_spr(SPR_MQ, t1);
5033 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5034 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5035 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5036 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5040 if (unlikely(Rc(ctx->opcode) != 0))
5041 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5044 /* PowerPC 602 specific instructions */
5047 static void gen_dsa(DisasContext *ctx)
5050 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5054 static void gen_esa(DisasContext *ctx)
5057 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5061 static void gen_mfrom(DisasContext *ctx)
5063 #if defined(CONFIG_USER_ONLY)
5064 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5066 if (unlikely(!ctx->mem_idx)) {
5067 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5070 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5074 /* 602 - 603 - G2 TLB management */
5077 static void gen_tlbld_6xx(DisasContext *ctx)
5079 #if defined(CONFIG_USER_ONLY)
5080 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5082 if (unlikely(!ctx->mem_idx)) {
5083 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5086 gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5091 static void gen_tlbli_6xx(DisasContext *ctx)
5093 #if defined(CONFIG_USER_ONLY)
5094 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5096 if (unlikely(!ctx->mem_idx)) {
5097 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5100 gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5104 /* 74xx TLB management */
5107 static void gen_tlbld_74xx(DisasContext *ctx)
5109 #if defined(CONFIG_USER_ONLY)
5110 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5112 if (unlikely(!ctx->mem_idx)) {
5113 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5116 gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5121 static void gen_tlbli_74xx(DisasContext *ctx)
5123 #if defined(CONFIG_USER_ONLY)
5124 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5126 if (unlikely(!ctx->mem_idx)) {
5127 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5130 gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5134 /* POWER instructions not in PowerPC 601 */
5137 static void gen_clf(DisasContext *ctx)
5139 /* Cache line flush: implemented as no-op */
5143 static void gen_cli(DisasContext *ctx)
5145 /* Cache line invalidate: privileged and treated as no-op */
5146 #if defined(CONFIG_USER_ONLY)
5147 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5149 if (unlikely(!ctx->mem_idx)) {
5150 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5157 static void gen_dclst(DisasContext *ctx)
5159 /* Data cache line store: treated as no-op */
5162 static void gen_mfsri(DisasContext *ctx)
5164 #if defined(CONFIG_USER_ONLY)
5165 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5167 int ra = rA(ctx->opcode);
5168 int rd = rD(ctx->opcode);
5170 if (unlikely(!ctx->mem_idx)) {
5171 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5174 t0 = tcg_temp_new();
5175 gen_addr_reg_index(ctx, t0);
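/* The top nibble of the effective address selects the segment register */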
5176 tcg_gen_shri_tl(t0, t0, 28);
5177 tcg_gen_andi_tl(t0, t0, 0xF);
5178 gen_helper_load_sr(cpu_gpr[rd], t0);
5180 if (ra != 0 && ra != rd)
5181 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5185 static void gen_rac(DisasContext *ctx)
5187 #if defined(CONFIG_USER_ONLY)
5188 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5191 if (unlikely(!ctx->mem_idx)) {
5192 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5195 t0 = tcg_temp_new();
5196 gen_addr_reg_index(ctx, t0);
5197 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5202 static void gen_rfsvc(DisasContext *ctx)
5204 #if defined(CONFIG_USER_ONLY)
5205 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5207 if (unlikely(!ctx->mem_idx)) {
5208 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5212 gen_sync_exception(ctx);
5216 /* svc is not implemented for now */
5218 /* POWER2 specific instructions */
5219 /* Quad manipulation (load/store two floats at a time) */
5222 static void gen_lfq(DisasContext *ctx)
5224 int rd = rD(ctx->opcode);
5226 gen_set_access_type(ctx, ACCESS_FLOAT);
5227 t0 = tcg_temp_new();
5228 gen_addr_imm_index(ctx, t0, 0);
5229 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5230 gen_addr_add(ctx, t0, t0, 8);
5231 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5236 static void gen_lfqu(DisasContext *ctx)
5238 int ra = rA(ctx->opcode);
5239 int rd = rD(ctx->opcode);
5241 gen_set_access_type(ctx, ACCESS_FLOAT);
5242 t0 = tcg_temp_new();
5243 t1 = tcg_temp_new();
5244 gen_addr_imm_index(ctx, t0, 0);
5245 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5246 gen_addr_add(ctx, t1, t0, 8);
5247 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5249 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5255 static void gen_lfqux(DisasContext *ctx)
5257 int ra = rA(ctx->opcode);
5258 int rd = rD(ctx->opcode);
5259 gen_set_access_type(ctx, ACCESS_FLOAT);
5261 t0 = tcg_temp_new();
5262 gen_addr_reg_index(ctx, t0);
5263 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5264 t1 = tcg_temp_new();
5265 gen_addr_add(ctx, t1, t0, 8);
5266 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5269 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5274 static void gen_lfqx(DisasContext *ctx)
5276 int rd = rD(ctx->opcode);
5278 gen_set_access_type(ctx, ACCESS_FLOAT);
5279 t0 = tcg_temp_new();
5280 gen_addr_reg_index(ctx, t0);
5281 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5282 gen_addr_add(ctx, t0, t0, 8);
5283 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5288 static void gen_stfq(DisasContext *ctx)
5290 int rd = rD(ctx->opcode);
5292 gen_set_access_type(ctx, ACCESS_FLOAT);
5293 t0 = tcg_temp_new();
5294 gen_addr_imm_index(ctx, t0, 0);
5295 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5296 gen_addr_add(ctx, t0, t0, 8);
5297 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5302 static void gen_stfqu(DisasContext *ctx)
5304 int ra = rA(ctx->opcode);
5305 int rd = rD(ctx->opcode);
5307 gen_set_access_type(ctx, ACCESS_FLOAT);
5308 t0 = tcg_temp_new();
5309 gen_addr_imm_index(ctx, t0, 0);
5310 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5311 t1 = tcg_temp_new();
5312 gen_addr_add(ctx, t1, t0, 8);
5313 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5316 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5321 static void gen_stfqux(DisasContext *ctx)
5323 int ra = rA(ctx->opcode);
5324 int rd = rD(ctx->opcode);
5326 gen_set_access_type(ctx, ACCESS_FLOAT);
5327 t0 = tcg_temp_new();
5328 gen_addr_reg_index(ctx, t0);
5329 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5330 t1 = tcg_temp_new();
5331 gen_addr_add(ctx, t1, t0, 8);
5332 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5335 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5340 static void gen_stfqx(DisasContext *ctx)
5342 int rd = rD(ctx->opcode);
5344 gen_set_access_type(ctx, ACCESS_FLOAT);
5345 t0 = tcg_temp_new();
5346 gen_addr_reg_index(ctx, t0);
5347 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5348 gen_addr_add(ctx, t0, t0, 8);
5349 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5353 /* BookE specific instructions */
5355 /* XXX: not implemented on 440 ? */
5356 static void gen_mfapidi(DisasContext *ctx)
5359 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5362 /* XXX: not implemented on 440 ? */
5363 static void gen_tlbiva(DisasContext *ctx)
5365 #if defined(CONFIG_USER_ONLY)
5366 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5369 if (unlikely(!ctx->mem_idx)) {
5370 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5373 t0 = tcg_temp_new();
5374 gen_addr_reg_index(ctx, t0);
5375 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
5380 /* All 405 MAC instructions are translated here */
5381 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5382 int ra, int rb, int rt, int Rc)
5386 t0 = tcg_temp_local_new();
5387 t1 = tcg_temp_local_new();
5389 switch (opc3 & 0x0D) {
5391 /* macchw - macchw. - macchwo - macchwo. */
5392 /* macchws - macchws. - macchwso - macchwso. */
5393 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5394 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5395 /* mulchw - mulchw. */
5396 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5397 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5398 tcg_gen_ext16s_tl(t1, t1);
5401 /* macchwu - macchwu. - macchwuo - macchwuo. */
5402 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5403 /* mulchwu - mulchwu. */
5404 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5405 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5406 tcg_gen_ext16u_tl(t1, t1);
5409 /* machhw - machhw. - machhwo - machhwo. */
5410 /* machhws - machhws. - machhwso - machhwso. */
5411 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5412 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5413 /* mulhhw - mulhhw. */
5414 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5415 tcg_gen_ext16s_tl(t0, t0);
5416 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5417 tcg_gen_ext16s_tl(t1, t1);
5420 /* machhwu - machhwu. - machhwuo - machhwuo. */
5421 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5422 /* mulhhwu - mulhhwu. */
5423 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5424 tcg_gen_ext16u_tl(t0, t0);
5425 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5426 tcg_gen_ext16u_tl(t1, t1);
5429 /* maclhw - maclhw. - maclhwo - maclhwo. */
5430 /* maclhws - maclhws. - maclhwso - maclhwso. */
5431 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5432 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5433 /* mullhw - mullhw. */
5434 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5435 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5438 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5439 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5440 /* mullhwu - mullhwu. */
5441 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5442 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5446 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5447 tcg_gen_mul_tl(t1, t0, t1);
5449 /* nmultiply-and-accumulate (0x0E) */
5450 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5452 /* multiply-and-accumulate (0x0C) */
5453 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5457 /* Check overflow and/or saturate */
5458 int l1 = gen_new_label();
5461 /* Start with XER OV disabled, the most likely case */
5462 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5466 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5467 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5468 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5469 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5472 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5473 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5477 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5480 tcg_gen_movi_tl(t0, UINT32_MAX);
5484 /* Check overflow */
5485 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5488 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5491 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5495 if (unlikely(Rc != 0)) {
5497 gen_set_Rc0(ctx, cpu_gpr[rt]);
5501 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5502 static void glue(gen_, name)(DisasContext *ctx) \
5504 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5505 rD(ctx->opcode), Rc(ctx->opcode)); \
5508 /* macchw - macchw. */
5509 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5510 /* macchwo - macchwo. */
5511 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5512 /* macchws - macchws. */
5513 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5514 /* macchwso - macchwso. */
5515 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5516 /* macchwsu - macchwsu. */
5517 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5518 /* macchwsuo - macchwsuo. */
5519 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5520 /* macchwu - macchwu. */
5521 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5522 /* macchwuo - macchwuo. */
5523 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5524 /* machhw - machhw. */
5525 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5526 /* machhwo - machhwo. */
5527 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5528 /* machhws - machhws. */
5529 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5530 /* machhwso - machhwso. */
5531 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5532 /* machhwsu - machhwsu. */
5533 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5534 /* machhwsuo - machhwsuo. */
5535 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5536 /* machhwu - machhwu. */
5537 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5538 /* machhwuo - machhwuo. */
5539 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5540 /* maclhw - maclhw. */
5541 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5542 /* maclhwo - maclhwo. */
5543 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5544 /* maclhws - maclhws. */
5545 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5546 /* maclhwso - maclhwso. */
5547 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5548 /* maclhwu - maclhwu. */
5549 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5550 /* maclhwuo - maclhwuo. */
5551 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5552 /* maclhwsu - maclhwsu. */
5553 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5554 /* maclhwsuo - maclhwsuo. */
5555 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5556 /* nmacchw - nmacchw. */
5557 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5558 /* nmacchwo - nmacchwo. */
5559 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5560 /* nmacchws - nmacchws. */
5561 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5562 /* nmacchwso - nmacchwso. */
5563 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5564 /* nmachhw - nmachhw. */
5565 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5566 /* nmachhwo - nmachhwo. */
5567 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5568 /* nmachhws - nmachhws. */
5569 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5570 /* nmachhwso - nmachhwso. */
5571 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5572 /* nmaclhw - nmaclhw. */
5573 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5574 /* nmaclhwo - nmaclhwo. */
5575 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5576 /* nmaclhws - nmaclhws. */
5577 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5578 /* nmaclhwso - nmaclhwso. */
5579 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5581 /* mulchw - mulchw. */
5582 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5583 /* mulchwu - mulchwu. */
5584 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5585 /* mulhhw - mulhhw. */
5586 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5587 /* mulhhwu - mulhhwu. */
5588 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5589 /* mullhw - mullhw. */
5590 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5591 /* mullhwu - mullhwu. */
5592 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5595 static void gen_mfdcr(DisasContext *ctx)
5597 #if defined(CONFIG_USER_ONLY)
5598 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5601 if (unlikely(!ctx->mem_idx)) {
5602 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5605 /* NIP cannot be restored if the memory exception comes from a helper */
5606 gen_update_nip(ctx, ctx->nip - 4);
5607 dcrn = tcg_const_tl(SPR(ctx->opcode));
5608 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
5609 tcg_temp_free(dcrn);
5614 static void gen_mtdcr(DisasContext *ctx)
5616 #if defined(CONFIG_USER_ONLY)
5617 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5620 if (unlikely(!ctx->mem_idx)) {
5621 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5624 /* NIP cannot be restored if the memory exception comes from a helper */
5625 gen_update_nip(ctx, ctx->nip - 4);
5626 dcrn = tcg_const_tl(SPR(ctx->opcode));
5627 gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
5628 tcg_temp_free(dcrn);
5633 /* XXX: not implemented on 440 ? */
5634 static void gen_mfdcrx(DisasContext *ctx)
5636 #if defined(CONFIG_USER_ONLY)
5637 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5639 if (unlikely(!ctx->mem_idx)) {
5640 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5643 /* NIP cannot be restored if the memory exception comes from a helper */
5644 gen_update_nip(ctx, ctx->nip - 4);
5645 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5646 /* Note: if the Rc bit is set, the state of Rc0 (CR0) is undefined */
5651 /* XXX: not implemented on 440 ? */
5652 static void gen_mtdcrx(DisasContext *ctx)
5654 #if defined(CONFIG_USER_ONLY)
5655 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5657 if (unlikely(!ctx->mem_idx)) {
5658 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5661 /* NIP cannot be restored if the memory exception comes from a helper */
5662 gen_update_nip(ctx, ctx->nip - 4);
5663 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5664 /* Note: if the Rc bit is set, the state of Rc0 (CR0) is undefined */
5668 /* mfdcrux (PPC 460) : user-mode access to DCR */
5669 static void gen_mfdcrux(DisasContext *ctx)
5671 /* NIP cannot be restored if the memory exception comes from a helper */
5672 gen_update_nip(ctx, ctx->nip - 4);
5673 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5674 /* Note: if the Rc bit is set, the state of Rc0 (CR0) is undefined */
5677 /* mtdcrux (PPC 460) : user-mode access to DCR */
5678 static void gen_mtdcrux(DisasContext *ctx)
5680 /* NIP cannot be restored if the memory exception comes from a helper */
5681 gen_update_nip(ctx, ctx->nip - 4);
5682 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5683 /* Note: if the Rc bit is set, the state of Rc0 (CR0) is undefined */
5687 static void gen_dccci(DisasContext *ctx)
5689 #if defined(CONFIG_USER_ONLY)
5690 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5692 if (unlikely(!ctx->mem_idx)) {
5693 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5696 /* interpreted as no-op */
5701 static void gen_dcread(DisasContext *ctx)
5703 #if defined(CONFIG_USER_ONLY)
5704 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5707 if (unlikely(!ctx->mem_idx)) {
5708 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5711 gen_set_access_type(ctx, ACCESS_CACHE);
5712 EA = tcg_temp_new();
5713 gen_addr_reg_index(ctx, EA);
5714 val = tcg_temp_new();
5715 gen_qemu_ld32u(ctx, val, EA);
5717 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5723 static void gen_icbt_40x(DisasContext *ctx)
5725 /* interpreted as no-op */
5726 /* XXX: the specification says this is treated as a load by the MMU
5727 * but does not generate any exception
5732 static void gen_iccci(DisasContext *ctx)
5734 #if defined(CONFIG_USER_ONLY)
5735 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5737 if (unlikely(!ctx->mem_idx)) {
5738 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5741 /* interpreted as no-op */
5746 static void gen_icread(DisasContext *ctx)
5748 #if defined(CONFIG_USER_ONLY)
5749 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5751 if (unlikely(!ctx->mem_idx)) {
5752 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5755 /* interpreted as no-op */
5759 /* rfci (mem_idx only) */
5760 static void gen_rfci_40x(DisasContext *ctx)
5762 #if defined(CONFIG_USER_ONLY)
5763 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5765 if (unlikely(!ctx->mem_idx)) {
5766 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5769 /* Restore CPU state */
5770 gen_helper_40x_rfci();
5771 gen_sync_exception(ctx);
5775 static void gen_rfci(DisasContext *ctx)
5777 #if defined(CONFIG_USER_ONLY)
5778 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5780 if (unlikely(!ctx->mem_idx)) {
5781 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5784 /* Restore CPU state */
5786 gen_sync_exception(ctx);
5790 /* BookE specific */
5792 /* XXX: not implemented on 440 ? */
5793 static void gen_rfdi(DisasContext *ctx)
5795 #if defined(CONFIG_USER_ONLY)
5796 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5798 if (unlikely(!ctx->mem_idx)) {
5799 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5802 /* Restore CPU state */
5804 gen_sync_exception(ctx);
5808 /* XXX: not implemented on 440 ? */
5809 static void gen_rfmci(DisasContext *ctx)
5811 #if defined(CONFIG_USER_ONLY)
5812 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5814 if (unlikely(!ctx->mem_idx)) {
5815 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5818 /* Restore CPU state */
5820 gen_sync_exception(ctx);
5824 /* TLB management - PowerPC 405 implementation */
5827 static void gen_tlbre_40x(DisasContext *ctx)
5829 #if defined(CONFIG_USER_ONLY)
5830 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5832 if (unlikely(!ctx->mem_idx)) {
5833 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5836 switch (rB(ctx->opcode)) {
5838 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5841 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5844 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5850 /* tlbsx - tlbsx. */
5851 static void gen_tlbsx_40x(DisasContext *ctx)
5853 #if defined(CONFIG_USER_ONLY)
5854 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5857 if (unlikely(!ctx->mem_idx)) {
5858 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5861 t0 = tcg_temp_new();
5862 gen_addr_reg_index(ctx, t0);
5863 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5865 if (Rc(ctx->opcode)) {
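/* CR0 gets SO from XER, and EQ is set when the TLB search found an entry (rD != -1) */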
5866 int l1 = gen_new_label();
5867 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5868 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5869 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5870 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5871 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5878 static void gen_tlbwe_40x(DisasContext *ctx)
5880 #if defined(CONFIG_USER_ONLY)
5881 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5883 if (unlikely(!ctx->mem_idx)) {
5884 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5887 switch (rB(ctx->opcode)) {
5889 gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5892 gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5895 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5901 /* TLB management - PowerPC 440 implementation */
5904 static void gen_tlbre_440(DisasContext *ctx)
5906 #if defined(CONFIG_USER_ONLY)
5907 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5909 if (unlikely(!ctx->mem_idx)) {
5910 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5913 switch (rB(ctx->opcode)) {
5918 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5919 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]);
5920 tcg_temp_free_i32(t0);
5924 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5930 /* tlbsx - tlbsx. */
5931 static void gen_tlbsx_440(DisasContext *ctx)
5933 #if defined(CONFIG_USER_ONLY)
5934 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5937 if (unlikely(!ctx->mem_idx)) {
5938 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5941 t0 = tcg_temp_new();
5942 gen_addr_reg_index(ctx, t0);
5943 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5945 if (Rc(ctx->opcode)) {
5946 int l1 = gen_new_label();
5947 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5948 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5949 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5950 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5951 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5958 static void gen_tlbwe_440(DisasContext *ctx)
5960 #if defined(CONFIG_USER_ONLY)
5961 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5963 if (unlikely(!ctx->mem_idx)) {
5964 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5967 switch (rB(ctx->opcode)) {
5972 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5973 gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5974 tcg_temp_free_i32(t0);
5978 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5984 /* TLB management - PowerPC BookE 2.06 implementation */
5987 static void gen_tlbre_booke206(DisasContext *ctx)
5989 #if defined(CONFIG_USER_ONLY)
5990 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5992 if (unlikely(!ctx->mem_idx)) {
5993 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5997 gen_helper_booke206_tlbre();
6001 /* tlbsx - tlbsx. */
6002 static void gen_tlbsx_booke206(DisasContext *ctx)
6004 #if defined(CONFIG_USER_ONLY)
6005 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6008 if (unlikely(!ctx->mem_idx)) {
6009 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6013 if (rA(ctx->opcode)) {
6014 t0 = tcg_temp_new();
6015 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6017 t0 = tcg_const_tl(0);
6020 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6021 gen_helper_booke206_tlbsx(t0);
6026 static void gen_tlbwe_booke206(DisasContext *ctx)
6028 #if defined(CONFIG_USER_ONLY)
6029 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6031 if (unlikely(!ctx->mem_idx)) {
6032 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6035 gen_helper_booke206_tlbwe();
6039 static void gen_tlbivax_booke206(DisasContext *ctx)
6041 #if defined(CONFIG_USER_ONLY)
6042 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6045 if (unlikely(!ctx->mem_idx)) {
6046 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6050 t0 = tcg_temp_new();
6051 gen_addr_reg_index(ctx, t0);
6053 gen_helper_booke206_tlbivax(t0);
6059 static void gen_wrtee(DisasContext *ctx)
6061 #if defined(CONFIG_USER_ONLY)
6062 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6065 if (unlikely(!ctx->mem_idx)) {
6066 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6069 t0 = tcg_temp_new();
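/* Replace MSR[EE] with the EE bit taken from rD */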
6070 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6071 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6072 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6074 /* Stop translation to have a chance to raise an exception
6075 * if we just set msr_ee to 1
6077 gen_stop_exception(ctx);
6082 static void gen_wrteei(DisasContext *ctx)
6084 #if defined(CONFIG_USER_ONLY)
6085 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6087 if (unlikely(!ctx->mem_idx)) {
6088 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6091 if (ctx->opcode & 0x00008000) {
6092 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6093 /* Stop translation to have a chance to raise an exception */
6094 gen_stop_exception(ctx);
6096 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6101 /* PowerPC 440 specific instructions */
6104 static void gen_dlmzb(DisasContext *ctx)
6106 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6107 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6108 cpu_gpr[rB(ctx->opcode)], t0);
6109 tcg_temp_free_i32(t0);
6112 /* mbar replaces eieio on 440 */
6113 static void gen_mbar(DisasContext *ctx)
6115 /* interpreted as no-op */
6118 /* msync replaces sync on 440 */
6119 static void gen_msync(DisasContext *ctx)
6121 /* interpreted as no-op */
6125 static void gen_icbt_440(DisasContext *ctx)
6127 /* interpreted as no-op */
6128 /* XXX: the specification says this is treated as a load by the MMU
6129 * but does not generate any exception
6133 /*** Altivec vector extension ***/
6134 /* Altivec register moves */
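/* Return a pointer to AVR register 'reg' inside the CPU state */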
6136 static inline TCGv_ptr gen_avr_ptr(int reg)
6138 TCGv_ptr r = tcg_temp_new_ptr();
6139 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6143 #define GEN_VR_LDX(name, opc2, opc3) \
6144 static void glue(gen_, name)(DisasContext *ctx) \
6147 if (unlikely(!ctx->altivec_enabled)) { \
6148 gen_exception(ctx, POWERPC_EXCP_VPU); \
6151 gen_set_access_type(ctx, ACCESS_INT); \
6152 EA = tcg_temp_new(); \
6153 gen_addr_reg_index(ctx, EA); \
6154 tcg_gen_andi_tl(EA, EA, ~0xf); \
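/* The low four bits of EA are ignored: lvx/lvxl accesses are 16-byte aligned */     \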
6155 if (ctx->le_mode) { \
6156 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6157 tcg_gen_addi_tl(EA, EA, 8); \
6158 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6160 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6161 tcg_gen_addi_tl(EA, EA, 8); \
6162 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6164 tcg_temp_free(EA); \
6167 #define GEN_VR_STX(name, opc2, opc3) \
6168 static void gen_st##name(DisasContext *ctx) \
6171 if (unlikely(!ctx->altivec_enabled)) { \
6172 gen_exception(ctx, POWERPC_EXCP_VPU); \
6175 gen_set_access_type(ctx, ACCESS_INT); \
6176 EA = tcg_temp_new(); \
6177 gen_addr_reg_index(ctx, EA); \
6178 tcg_gen_andi_tl(EA, EA, ~0xf); \
6179 if (ctx->le_mode) { \
6180 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6181 tcg_gen_addi_tl(EA, EA, 8); \
6182 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6184 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6185 tcg_gen_addi_tl(EA, EA, 8); \
6186 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6188 tcg_temp_free(EA); \
6191 #define GEN_VR_LVE(name, opc2, opc3) \
6192 static void gen_lve##name(DisasContext *ctx) \
6196 if (unlikely(!ctx->altivec_enabled)) { \
6197 gen_exception(ctx, POWERPC_EXCP_VPU); \
6200 gen_set_access_type(ctx, ACCESS_INT); \
6201 EA = tcg_temp_new(); \
6202 gen_addr_reg_index(ctx, EA); \
6203 rs = gen_avr_ptr(rS(ctx->opcode)); \
6204 gen_helper_lve##name (rs, EA); \
6205 tcg_temp_free(EA); \
6206 tcg_temp_free_ptr(rs); \
6209 #define GEN_VR_STVE(name, opc2, opc3) \
6210 static void gen_stve##name(DisasContext *ctx) \
6214 if (unlikely(!ctx->altivec_enabled)) { \
6215 gen_exception(ctx, POWERPC_EXCP_VPU); \
6218 gen_set_access_type(ctx, ACCESS_INT); \
6219 EA = tcg_temp_new(); \
6220 gen_addr_reg_index(ctx, EA); \
6221 rs = gen_avr_ptr(rS(ctx->opcode)); \
6222 gen_helper_stve##name (rs, EA); \
6223 tcg_temp_free(EA); \
6224 tcg_temp_free_ptr(rs); \
6227 GEN_VR_LDX(lvx, 0x07, 0x03);
6228 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6229 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6231 GEN_VR_LVE(bx, 0x07, 0x00);
6232 GEN_VR_LVE(hx, 0x07, 0x01);
6233 GEN_VR_LVE(wx, 0x07, 0x02);
6235 GEN_VR_STX(svx, 0x07, 0x07);
6236 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6237 GEN_VR_STX(svxl, 0x07, 0x0F);
6239 GEN_VR_STVE(bx, 0x07, 0x04);
6240 GEN_VR_STVE(hx, 0x07, 0x05);
6241 GEN_VR_STVE(wx, 0x07, 0x06);
6243 static void gen_lvsl(DisasContext *ctx)
6247 if (unlikely(!ctx->altivec_enabled)) {
6248 gen_exception(ctx, POWERPC_EXCP_VPU);
6251 EA = tcg_temp_new();
6252 gen_addr_reg_index(ctx, EA);
6253 rd = gen_avr_ptr(rD(ctx->opcode));
6254 gen_helper_lvsl(rd, EA);
6256 tcg_temp_free_ptr(rd);
6259 static void gen_lvsr(DisasContext *ctx)
6263 if (unlikely(!ctx->altivec_enabled)) {
6264 gen_exception(ctx, POWERPC_EXCP_VPU);
6267 EA = tcg_temp_new();
6268 gen_addr_reg_index(ctx, EA);
6269 rd = gen_avr_ptr(rD(ctx->opcode));
6270 gen_helper_lvsr(rd, EA);
6272 tcg_temp_free_ptr(rd);
6275 static void gen_mfvscr(DisasContext *ctx)
6278 if (unlikely(!ctx->altivec_enabled)) {
6279 gen_exception(ctx, POWERPC_EXCP_VPU);
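/* VSCR is returned zero-extended in the low half of the AVR; the high half is cleared */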
6282 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6283 t = tcg_temp_new_i32();
6284 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr));
6285 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6286 tcg_temp_free_i32(t);
6289 static void gen_mtvscr(DisasContext *ctx)
6292 if (unlikely(!ctx->altivec_enabled)) {
6293 gen_exception(ctx, POWERPC_EXCP_VPU);
6296 p = gen_avr_ptr(rD(ctx->opcode));
6297 gen_helper_mtvscr(p);
6298 tcg_temp_free_ptr(p);
6301 /* Logical operations */
6302 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6303 static void glue(gen_, name)(DisasContext *ctx) \
6305 if (unlikely(!ctx->altivec_enabled)) { \
6306 gen_exception(ctx, POWERPC_EXCP_VPU); \
6309 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6310 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6313 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6314 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6315 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6316 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6317 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6319 #define GEN_VXFORM(name, opc2, opc3) \
6320 static void glue(gen_, name)(DisasContext *ctx) \
6322 TCGv_ptr ra, rb, rd; \
6323 if (unlikely(!ctx->altivec_enabled)) { \
6324 gen_exception(ctx, POWERPC_EXCP_VPU); \
6327 ra = gen_avr_ptr(rA(ctx->opcode)); \
6328 rb = gen_avr_ptr(rB(ctx->opcode)); \
6329 rd = gen_avr_ptr(rD(ctx->opcode)); \
6330 gen_helper_##name (rd, ra, rb); \
6331 tcg_temp_free_ptr(ra); \
6332 tcg_temp_free_ptr(rb); \
6333 tcg_temp_free_ptr(rd); \
6336 GEN_VXFORM(vaddubm, 0, 0);
6337 GEN_VXFORM(vadduhm, 0, 1);
6338 GEN_VXFORM(vadduwm, 0, 2);
6339 GEN_VXFORM(vsububm, 0, 16);
6340 GEN_VXFORM(vsubuhm, 0, 17);
6341 GEN_VXFORM(vsubuwm, 0, 18);
6342 GEN_VXFORM(vmaxub, 1, 0);
6343 GEN_VXFORM(vmaxuh, 1, 1);
6344 GEN_VXFORM(vmaxuw, 1, 2);
6345 GEN_VXFORM(vmaxsb, 1, 4);
6346 GEN_VXFORM(vmaxsh, 1, 5);
6347 GEN_VXFORM(vmaxsw, 1, 6);
6348 GEN_VXFORM(vminub, 1, 8);
6349 GEN_VXFORM(vminuh, 1, 9);
6350 GEN_VXFORM(vminuw, 1, 10);
6351 GEN_VXFORM(vminsb, 1, 12);
6352 GEN_VXFORM(vminsh, 1, 13);
6353 GEN_VXFORM(vminsw, 1, 14);
6354 GEN_VXFORM(vavgub, 1, 16);
6355 GEN_VXFORM(vavguh, 1, 17);
6356 GEN_VXFORM(vavguw, 1, 18);
6357 GEN_VXFORM(vavgsb, 1, 20);
6358 GEN_VXFORM(vavgsh, 1, 21);
6359 GEN_VXFORM(vavgsw, 1, 22);
6360 GEN_VXFORM(vmrghb, 6, 0);
6361 GEN_VXFORM(vmrghh, 6, 1);
6362 GEN_VXFORM(vmrghw, 6, 2);
6363 GEN_VXFORM(vmrglb, 6, 4);
6364 GEN_VXFORM(vmrglh, 6, 5);
6365 GEN_VXFORM(vmrglw, 6, 6);
6366 GEN_VXFORM(vmuloub, 4, 0);
6367 GEN_VXFORM(vmulouh, 4, 1);
6368 GEN_VXFORM(vmulosb, 4, 4);
6369 GEN_VXFORM(vmulosh, 4, 5);
6370 GEN_VXFORM(vmuleub, 4, 8);
6371 GEN_VXFORM(vmuleuh, 4, 9);
6372 GEN_VXFORM(vmulesb, 4, 12);
6373 GEN_VXFORM(vmulesh, 4, 13);
6374 GEN_VXFORM(vslb, 2, 4);
6375 GEN_VXFORM(vslh, 2, 5);
6376 GEN_VXFORM(vslw, 2, 6);
6377 GEN_VXFORM(vsrb, 2, 8);
6378 GEN_VXFORM(vsrh, 2, 9);
6379 GEN_VXFORM(vsrw, 2, 10);
6380 GEN_VXFORM(vsrab, 2, 12);
6381 GEN_VXFORM(vsrah, 2, 13);
6382 GEN_VXFORM(vsraw, 2, 14);
6383 GEN_VXFORM(vslo, 6, 16);
6384 GEN_VXFORM(vsro, 6, 17);
6385 GEN_VXFORM(vaddcuw, 0, 6);
6386 GEN_VXFORM(vsubcuw, 0, 22);
6387 GEN_VXFORM(vaddubs, 0, 8);
6388 GEN_VXFORM(vadduhs, 0, 9);
6389 GEN_VXFORM(vadduws, 0, 10);
6390 GEN_VXFORM(vaddsbs, 0, 12);
6391 GEN_VXFORM(vaddshs, 0, 13);
6392 GEN_VXFORM(vaddsws, 0, 14);
6393 GEN_VXFORM(vsububs, 0, 24);
6394 GEN_VXFORM(vsubuhs, 0, 25);
6395 GEN_VXFORM(vsubuws, 0, 26);
6396 GEN_VXFORM(vsubsbs, 0, 28);
6397 GEN_VXFORM(vsubshs, 0, 29);
6398 GEN_VXFORM(vsubsws, 0, 30);
6399 GEN_VXFORM(vrlb, 2, 0);
6400 GEN_VXFORM(vrlh, 2, 1);
6401 GEN_VXFORM(vrlw, 2, 2);
6402 GEN_VXFORM(vsl, 2, 7);
6403 GEN_VXFORM(vsr, 2, 11);
6404 GEN_VXFORM(vpkuhum, 7, 0);
6405 GEN_VXFORM(vpkuwum, 7, 1);
6406 GEN_VXFORM(vpkuhus, 7, 2);
6407 GEN_VXFORM(vpkuwus, 7, 3);
6408 GEN_VXFORM(vpkshus, 7, 4);
6409 GEN_VXFORM(vpkswus, 7, 5);
6410 GEN_VXFORM(vpkshss, 7, 6);
6411 GEN_VXFORM(vpkswss, 7, 7);
6412 GEN_VXFORM(vpkpx, 7, 12);
6413 GEN_VXFORM(vsum4ubs, 4, 24);
6414 GEN_VXFORM(vsum4sbs, 4, 28);
6415 GEN_VXFORM(vsum4shs, 4, 25);
6416 GEN_VXFORM(vsum2sws, 4, 26);
6417 GEN_VXFORM(vsumsws, 4, 30);
6418 GEN_VXFORM(vaddfp, 5, 0);
6419 GEN_VXFORM(vsubfp, 5, 1);
6420 GEN_VXFORM(vmaxfp, 5, 16);
6421 GEN_VXFORM(vminfp, 5, 17);
6423 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6424 static void glue(gen_, name)(DisasContext *ctx) \
6426 TCGv_ptr ra, rb, rd; \
6427 if (unlikely(!ctx->altivec_enabled)) { \
6428 gen_exception(ctx, POWERPC_EXCP_VPU); \
6431 ra = gen_avr_ptr(rA(ctx->opcode)); \
6432 rb = gen_avr_ptr(rB(ctx->opcode)); \
6433 rd = gen_avr_ptr(rD(ctx->opcode)); \
6434 gen_helper_##opname (rd, ra, rb); \
6435 tcg_temp_free_ptr(ra); \
6436 tcg_temp_free_ptr(rb); \
6437 tcg_temp_free_ptr(rd); \
6440 #define GEN_VXRFORM(name, opc2, opc3) \
6441 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6442 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
6444 GEN_VXRFORM(vcmpequb, 3, 0)
6445 GEN_VXRFORM(vcmpequh, 3, 1)
6446 GEN_VXRFORM(vcmpequw, 3, 2)
6447 GEN_VXRFORM(vcmpgtsb, 3, 12)
6448 GEN_VXRFORM(vcmpgtsh, 3, 13)
6449 GEN_VXRFORM(vcmpgtsw, 3, 14)
6450 GEN_VXRFORM(vcmpgtub, 3, 8)
6451 GEN_VXRFORM(vcmpgtuh, 3, 9)
6452 GEN_VXRFORM(vcmpgtuw, 3, 10)
6453 GEN_VXRFORM(vcmpeqfp, 3, 3)
6454 GEN_VXRFORM(vcmpgefp, 3, 7)
6455 GEN_VXRFORM(vcmpgtfp, 3, 11)
6456 GEN_VXRFORM(vcmpbfp, 3, 15)
6458 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6459 static void glue(gen_, name)(DisasContext *ctx) \
6463 if (unlikely(!ctx->altivec_enabled)) { \
6464 gen_exception(ctx, POWERPC_EXCP_VPU); \
6467 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6468 rd = gen_avr_ptr(rD(ctx->opcode)); \
6469 gen_helper_##name (rd, simm); \
6470 tcg_temp_free_i32(simm); \
6471 tcg_temp_free_ptr(rd); \
6474 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6475 GEN_VXFORM_SIMM(vspltish, 6, 13);
6476 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6478 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6479 static void glue(gen_, name)(DisasContext *ctx) \
6482 if (unlikely(!ctx->altivec_enabled)) { \
6483 gen_exception(ctx, POWERPC_EXCP_VPU); \
6486 rb = gen_avr_ptr(rB(ctx->opcode)); \
6487 rd = gen_avr_ptr(rD(ctx->opcode)); \
6488 gen_helper_##name (rd, rb); \
6489 tcg_temp_free_ptr(rb); \
6490 tcg_temp_free_ptr(rd); \
6493 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6494 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6495 GEN_VXFORM_NOA(vupklsb, 7, 10);
6496 GEN_VXFORM_NOA(vupklsh, 7, 11);
6497 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6498 GEN_VXFORM_NOA(vupklpx, 7, 15);
6499 GEN_VXFORM_NOA(vrefp, 5, 4);
6500 GEN_VXFORM_NOA(vrsqrtefp, 5, 5);
6501 GEN_VXFORM_NOA(vexptefp, 5, 6);
6502 GEN_VXFORM_NOA(vlogefp, 5, 7);
6503 GEN_VXFORM_NOA(vrfim, 5, 8);
6504 GEN_VXFORM_NOA(vrfin, 5, 9);
6505 GEN_VXFORM_NOA(vrfip, 5, 10);
6506 GEN_VXFORM_NOA(vrfiz, 5, 11);
6508 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6509 static void glue(gen_, name)(DisasContext *ctx) \
6513 if (unlikely(!ctx->altivec_enabled)) { \
6514 gen_exception(ctx, POWERPC_EXCP_VPU); \
6517 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6518 rd = gen_avr_ptr(rD(ctx->opcode)); \
6519 gen_helper_##name (rd, simm); \
6520 tcg_temp_free_i32(simm); \
6521 tcg_temp_free_ptr(rd); \
6524 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6525 static void glue(gen_, name)(DisasContext *ctx) \
6529 if (unlikely(!ctx->altivec_enabled)) { \
6530 gen_exception(ctx, POWERPC_EXCP_VPU); \
6533 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6534 rb = gen_avr_ptr(rB(ctx->opcode)); \
6535 rd = gen_avr_ptr(rD(ctx->opcode)); \
6536 gen_helper_##name (rd, rb, uimm); \
6537 tcg_temp_free_i32(uimm); \
6538 tcg_temp_free_ptr(rb); \
6539 tcg_temp_free_ptr(rd); \
6542 GEN_VXFORM_UIMM(vspltb, 6, 8);
6543 GEN_VXFORM_UIMM(vsplth, 6, 9);
6544 GEN_VXFORM_UIMM(vspltw, 6, 10);
6545 GEN_VXFORM_UIMM(vcfux, 5, 12);
6546 GEN_VXFORM_UIMM(vcfsx, 5, 13);
6547 GEN_VXFORM_UIMM(vctuxs, 5, 14);
6548 GEN_VXFORM_UIMM(vctsxs, 5, 15);
6550 static void gen_vsldoi(DisasContext *ctx)
6552 TCGv_ptr ra, rb, rd;
6554 if (unlikely(!ctx->altivec_enabled)) {
6555 gen_exception(ctx, POWERPC_EXCP_VPU);
6558 ra = gen_avr_ptr(rA(ctx->opcode));
6559 rb = gen_avr_ptr(rB(ctx->opcode));
6560 rd = gen_avr_ptr(rD(ctx->opcode));
6561 sh = tcg_const_i32(VSH(ctx->opcode));
6562 gen_helper_vsldoi (rd, ra, rb, sh);
6563 tcg_temp_free_ptr(ra);
6564 tcg_temp_free_ptr(rb);
6565 tcg_temp_free_ptr(rd);
6566 tcg_temp_free_i32(sh);
6569 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6570 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6572 TCGv_ptr ra, rb, rc, rd; \
6573 if (unlikely(!ctx->altivec_enabled)) { \
6574 gen_exception(ctx, POWERPC_EXCP_VPU); \
6577 ra = gen_avr_ptr(rA(ctx->opcode)); \
6578 rb = gen_avr_ptr(rB(ctx->opcode)); \
6579 rc = gen_avr_ptr(rC(ctx->opcode)); \
6580 rd = gen_avr_ptr(rD(ctx->opcode)); \
6581 if (Rc(ctx->opcode)) { \
6582 gen_helper_##name1 (rd, ra, rb, rc); \
6584 gen_helper_##name0 (rd, ra, rb, rc); \
6586 tcg_temp_free_ptr(ra); \
6587 tcg_temp_free_ptr(rb); \
6588 tcg_temp_free_ptr(rc); \
6589 tcg_temp_free_ptr(rd); \
6592 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6594 static void gen_vmladduhm(DisasContext *ctx)
6596 TCGv_ptr ra, rb, rc, rd;
6597 if (unlikely(!ctx->altivec_enabled)) {
6598 gen_exception(ctx, POWERPC_EXCP_VPU);
6601 ra = gen_avr_ptr(rA(ctx->opcode));
6602 rb = gen_avr_ptr(rB(ctx->opcode));
6603 rc = gen_avr_ptr(rC(ctx->opcode));
6604 rd = gen_avr_ptr(rD(ctx->opcode));
6605 gen_helper_vmladduhm(rd, ra, rb, rc);
6606 tcg_temp_free_ptr(ra);
6607 tcg_temp_free_ptr(rb);
6608 tcg_temp_free_ptr(rc);
6609 tcg_temp_free_ptr(rd);
6612 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
6613 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
6614 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
6615 GEN_VAFORM_PAIRED(vsel, vperm, 21)
6616 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
6618 /*** SPE extension ***/
6619 /* Register moves */
6622 static inline void gen_evmra(DisasContext *ctx)
6625 if (unlikely(!ctx->spe_enabled)) {
6626 gen_exception(ctx, POWERPC_EXCP_APU);
6630 #if defined(TARGET_PPC64)
6632 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6635 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6637 offsetof(CPUState, spe_acc));
6639 TCGv_i64 tmp = tcg_temp_new_i64();
6641 /* tmp := (rA_hi << 32) | rA_lo */
6642 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6644 /* spe_acc := tmp */
6645 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
6646 tcg_temp_free_i64(tmp);
6649 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6650 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
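/* On 32-bit targets a 64-bit SPE value is split between GPR (low half) and GPRh (high half) */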
6654 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
6656 #if defined(TARGET_PPC64)
6657 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6659 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6663 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
6665 #if defined(TARGET_PPC64)
6666 tcg_gen_mov_i64(cpu_gpr[reg], t);
6668 TCGv_i64 tmp = tcg_temp_new_i64();
6669 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6670 tcg_gen_shri_i64(tmp, t, 32);
6671 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6672 tcg_temp_free_i64(tmp);
6676 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
6677 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6679 if (Rc(ctx->opcode)) \
6685 /* Handler for undefined SPE opcodes */
6686 static inline void gen_speundef(DisasContext *ctx)
6688 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6692 #if defined(TARGET_PPC64)
6693 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6694 static inline void gen_##name(DisasContext *ctx) \
6696 if (unlikely(!ctx->spe_enabled)) { \
6697 gen_exception(ctx, POWERPC_EXCP_APU); \
6700 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6701 cpu_gpr[rB(ctx->opcode)]); \
6704 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6705 static inline void gen_##name(DisasContext *ctx) \
6707 if (unlikely(!ctx->spe_enabled)) { \
6708 gen_exception(ctx, POWERPC_EXCP_APU); \
6711 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6712 cpu_gpr[rB(ctx->opcode)]); \
6713 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6714 cpu_gprh[rB(ctx->opcode)]); \
6718 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6719 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6720 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6721 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6722 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6723 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6724 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6725 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6727 /* SPE logic immediate */
6728 #if defined(TARGET_PPC64)
6729 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6730 static inline void gen_##name(DisasContext *ctx) \
6732 if (unlikely(!ctx->spe_enabled)) { \
6733 gen_exception(ctx, POWERPC_EXCP_APU); \
6736 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6737 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6738 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6739 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6740 tcg_opi(t0, t0, rB(ctx->opcode)); \
6741 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6742 tcg_gen_trunc_i64_i32(t1, t2); \
6743 tcg_temp_free_i64(t2); \
6744 tcg_opi(t1, t1, rB(ctx->opcode)); \
6745 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6746 tcg_temp_free_i32(t0); \
6747 tcg_temp_free_i32(t1); \
6750 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6751 static inline void gen_##name(DisasContext *ctx) \
6753 if (unlikely(!ctx->spe_enabled)) { \
6754 gen_exception(ctx, POWERPC_EXCP_APU); \
6757 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6759 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6763 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6764 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6765 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6766 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6768 /* SPE arithmetic */
6769 #if defined(TARGET_PPC64)
6770 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6771 static inline void gen_##name(DisasContext *ctx) \
6773 if (unlikely(!ctx->spe_enabled)) { \
6774 gen_exception(ctx, POWERPC_EXCP_APU); \
6777 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6778 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6779 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6780 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6782 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6783 tcg_gen_trunc_i64_i32(t1, t2); \
6784 tcg_temp_free_i64(t2); \
6786 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6787 tcg_temp_free_i32(t0); \
6788 tcg_temp_free_i32(t1); \
6791 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6792 static inline void gen_##name(DisasContext *ctx) \
6794 if (unlikely(!ctx->spe_enabled)) { \
6795 gen_exception(ctx, POWERPC_EXCP_APU); \
6798 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6799 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6803 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
6805 int l1 = gen_new_label();
6806 int l2 = gen_new_label();
6808 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6809 tcg_gen_neg_i32(ret, arg1);
6812 tcg_gen_mov_i32(ret, arg1);
6815 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6816 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6817 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6818 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6819 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
6821 tcg_gen_addi_i32(ret, arg1, 0x8000);
6822 tcg_gen_ext16u_i32(ret, ret);
6824 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6825 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6826 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6828 #if defined(TARGET_PPC64)
6829 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6830 static inline void gen_##name(DisasContext *ctx) \
6832 if (unlikely(!ctx->spe_enabled)) { \
6833 gen_exception(ctx, POWERPC_EXCP_APU); \
6836 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6837 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6838 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
6839 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
6840 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6841 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
6842 tcg_op(t0, t0, t2); \
6843 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
6844 tcg_gen_trunc_i64_i32(t1, t3); \
6845 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
6846 tcg_gen_trunc_i64_i32(t2, t3); \
6847 tcg_temp_free_i64(t3); \
6848 tcg_op(t1, t1, t2); \
6849 tcg_temp_free_i32(t2); \
6850 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6851 tcg_temp_free_i32(t0); \
6852 tcg_temp_free_i32(t1); \
6855 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6856 static inline void gen_##name(DisasContext *ctx) \
6858 if (unlikely(!ctx->spe_enabled)) { \
6859 gen_exception(ctx, POWERPC_EXCP_APU); \
6862 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6863 cpu_gpr[rB(ctx->opcode)]); \
6864 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6865 cpu_gprh[rB(ctx->opcode)]); \
6869 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6874 l1 = gen_new_label();
6875 l2 = gen_new_label();
6876 t0 = tcg_temp_local_new_i32();
6877 /* No error here: only the low 6 bits of the shift count are used */
6878 tcg_gen_andi_i32(t0, arg2, 0x3F);
6879 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6880 tcg_gen_shr_i32(ret, arg1, t0);
6883 tcg_gen_movi_i32(ret, 0);
6885 tcg_temp_free_i32(t0);
6887 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6888 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6893 l1 = gen_new_label();
6894 l2 = gen_new_label();
6895 t0 = tcg_temp_local_new_i32();
6896 /* No error here: only the low 6 bits of the shift count are used */
6897 tcg_gen_andi_i32(t0, arg2, 0x3F);
6898 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6899 tcg_gen_sar_i32(ret, arg1, t0);
6902 tcg_gen_movi_i32(ret, 0);
6904 tcg_temp_free_i32(t0);
6906 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6907 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6912 l1 = gen_new_label();
6913 l2 = gen_new_label();
6914 t0 = tcg_temp_local_new_i32();
6915 /* No error here: only the low 6 bits of the shift count are used */
6916 tcg_gen_andi_i32(t0, arg2, 0x3F);
6917 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6918 tcg_gen_shl_i32(ret, arg1, t0);
6921 tcg_gen_movi_i32(ret, 0);
6923 tcg_temp_free_i32(t0);
6925 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6926 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6928 TCGv_i32 t0 = tcg_temp_new_i32();
6929 tcg_gen_andi_i32(t0, arg2, 0x1F);
6930 tcg_gen_rotl_i32(ret, arg1, t0);
6931 tcg_temp_free_i32(t0);
6933 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6934 static inline void gen_evmergehi(DisasContext *ctx)
6936 if (unlikely(!ctx->spe_enabled)) {
6937 gen_exception(ctx, POWERPC_EXCP_APU);
6940 #if defined(TARGET_PPC64)
6941 TCGv t0 = tcg_temp_new();
6942 TCGv t1 = tcg_temp_new();
6943 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6944 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6945 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6949 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6950 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6953 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6954 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6956 tcg_gen_sub_i32(ret, arg2, arg1);
6958 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6960 /* SPE arithmetic immediate */
6961 #if defined(TARGET_PPC64)
6962 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6963 static inline void gen_##name(DisasContext *ctx) \
6965 if (unlikely(!ctx->spe_enabled)) { \
6966 gen_exception(ctx, POWERPC_EXCP_APU); \
6969 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6970 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6971 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6972 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
6973 tcg_op(t0, t0, rA(ctx->opcode)); \
6974 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6975 tcg_gen_trunc_i64_i32(t1, t2); \
6976 tcg_temp_free_i64(t2); \
6977 tcg_op(t1, t1, rA(ctx->opcode)); \
6978 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6979 tcg_temp_free_i32(t0); \
6980 tcg_temp_free_i32(t1); \
6983 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6984 static inline void gen_##name(DisasContext *ctx) \
6986 if (unlikely(!ctx->spe_enabled)) { \
6987 gen_exception(ctx, POWERPC_EXCP_APU); \
6990 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
6992 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
6996 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
6997 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
6999 /* SPE comparison */
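/* The ev* compare instructions produce a single CR field: CRF_CH reflects
 * the comparison on the high 32-bit elements, CRF_CL the comparison on the
 * low elements, and CRF_CH_OR_CL / CRF_CH_AND_CL hold the OR and AND of
 * those two results. */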
7000 #if defined(TARGET_PPC64)
7001 #define GEN_SPEOP_COMP(name, tcg_cond) \
7002 static inline void gen_##name(DisasContext *ctx) \
7004 if (unlikely(!ctx->spe_enabled)) { \
7005 gen_exception(ctx, POWERPC_EXCP_APU); \
7008 int l1 = gen_new_label(); \
7009 int l2 = gen_new_label(); \
7010 int l3 = gen_new_label(); \
7011 int l4 = gen_new_label(); \
7012 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7013 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7014 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7015 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7016 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7017 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7018 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7020 gen_set_label(l1); \
7021 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7022 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7023 gen_set_label(l2); \
7024 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7025 tcg_gen_trunc_i64_i32(t0, t2); \
7026 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7027 tcg_gen_trunc_i64_i32(t1, t2); \
7028 tcg_temp_free_i64(t2); \
7029 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7030 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7031 ~(CRF_CH | CRF_CH_AND_CL)); \
7033 gen_set_label(l3); \
7034 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7035 CRF_CH | CRF_CH_OR_CL); \
7036 gen_set_label(l4); \
7037 tcg_temp_free_i32(t0); \
7038 tcg_temp_free_i32(t1); \
7041 #define GEN_SPEOP_COMP(name, tcg_cond) \
7042 static inline void gen_##name(DisasContext *ctx) \
7044 if (unlikely(!ctx->spe_enabled)) { \
7045 gen_exception(ctx, POWERPC_EXCP_APU); \
7048 int l1 = gen_new_label(); \
7049 int l2 = gen_new_label(); \
7050 int l3 = gen_new_label(); \
7051 int l4 = gen_new_label(); \
7053 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7054 cpu_gpr[rB(ctx->opcode)], l1); \
7055 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7057 gen_set_label(l1); \
7058 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7059 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7060 gen_set_label(l2); \
7061 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7062 cpu_gprh[rB(ctx->opcode)], l3); \
7063 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7064 ~(CRF_CH | CRF_CH_AND_CL)); \
7066 gen_set_label(l3); \
7067 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7068 CRF_CH | CRF_CH_OR_CL); \
7069 gen_set_label(l4); \
7072 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7073 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7074 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7075 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7076 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7079 static inline void gen_brinc(DisasContext *ctx)
7081 /* Note: brinc is usable even if SPE is disabled */
7082 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
7083 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7085 static inline void gen_evmergelo(DisasContext *ctx)
7087 if (unlikely(!ctx->spe_enabled)) {
7088 gen_exception(ctx, POWERPC_EXCP_APU);
7091 #if defined(TARGET_PPC64)
7092 TCGv t0 = tcg_temp_new();
7093 TCGv t1 = tcg_temp_new();
7094 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7095 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7096 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7100 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7101 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7104 static inline void gen_evmergehilo(DisasContext *ctx)
7106 if (unlikely(!ctx->spe_enabled)) {
7107 gen_exception(ctx, POWERPC_EXCP_APU);
7110 #if defined(TARGET_PPC64)
7111 TCGv t0 = tcg_temp_new();
7112 TCGv t1 = tcg_temp_new();
7113 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7114 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7115 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7119 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7120 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7123 static inline void gen_evmergelohi(DisasContext *ctx)
7125 if (unlikely(!ctx->spe_enabled)) {
7126 gen_exception(ctx, POWERPC_EXCP_APU);
7129 #if defined(TARGET_PPC64)
7130 TCGv t0 = tcg_temp_new();
7131 TCGv t1 = tcg_temp_new();
7132 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7133 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7134 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7138 if (rD(ctx->opcode) == rA(ctx->opcode)) {
7139 TCGv_i32 tmp = tcg_temp_new_i32();
7140 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
7141 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7142 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
7143 tcg_temp_free_i32(tmp);
7145 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7146 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7150 static inline void gen_evsplati(DisasContext *ctx)
7152 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
7154 #if defined(TARGET_PPC64)
7155 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7157 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7158 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7161 static inline void gen_evsplatfi(DisasContext *ctx)
7163 uint64_t imm = rA(ctx->opcode) << 27;
7165 #if defined(TARGET_PPC64)
7166 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7168 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7169 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
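/* evsel: the high 32-bit element of rD is taken from rA or rB according to
 * the first bit of the selected CR field, the low element according to the
 * second bit (tested below as bits 3 and 2 of the 4-bit crf value). */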
7173 static inline void gen_evsel(DisasContext *ctx)
7175 int l1 = gen_new_label();
7176 int l2 = gen_new_label();
7177 int l3 = gen_new_label();
7178 int l4 = gen_new_label();
7179 TCGv_i32 t0 = tcg_temp_local_new_i32();
7180 #if defined(TARGET_PPC64)
7181 TCGv t1 = tcg_temp_local_new();
7182 TCGv t2 = tcg_temp_local_new();
7184 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7185 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7186 #if defined(TARGET_PPC64)
7187 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7189 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7193 #if defined(TARGET_PPC64)
7194 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7196 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7199 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7200 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7201 #if defined(TARGET_PPC64)
7202 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7204 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7208 #if defined(TARGET_PPC64)
7209 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7211 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7214 tcg_temp_free_i32(t0);
7215 #if defined(TARGET_PPC64)
7216 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7222 static void gen_evsel0(DisasContext *ctx)
7227 static void gen_evsel1(DisasContext *ctx)
7232 static void gen_evsel2(DisasContext *ctx)
7237 static void gen_evsel3(DisasContext *ctx)
7244 static inline void gen_evmwumi(DisasContext *ctx)
7248 if (unlikely(!ctx->spe_enabled)) {
7249 gen_exception(ctx, POWERPC_EXCP_APU);
7253 t0 = tcg_temp_new_i64();
7254 t1 = tcg_temp_new_i64();
7256 /* t0 := rA; t1 := rB */
7257 #if defined(TARGET_PPC64)
7258 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7259 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7261 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7262 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7265 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7267 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7269 tcg_temp_free_i64(t0);
7270 tcg_temp_free_i64(t1);
7273 static inline void gen_evmwumia(DisasContext *ctx)
7277 if (unlikely(!ctx->spe_enabled)) {
7278 gen_exception(ctx, POWERPC_EXCP_APU);
7282 gen_evmwumi(ctx); /* rD := rA * rB */
7284 tmp = tcg_temp_new_i64();
7287 gen_load_gpr64(tmp, rD(ctx->opcode));
7288 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7289 tcg_temp_free_i64(tmp);
7292 static inline void gen_evmwumiaa(DisasContext *ctx)
7297 if (unlikely(!ctx->spe_enabled)) {
7298 gen_exception(ctx, POWERPC_EXCP_APU);
7302 gen_evmwumi(ctx); /* rD := rA * rB */
7304 acc = tcg_temp_new_i64();
7305 tmp = tcg_temp_new_i64();
7308 gen_load_gpr64(tmp, rD(ctx->opcode));
7311 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7313 /* acc := tmp + acc */
7314 tcg_gen_add_i64(acc, acc, tmp);
7317 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7320 gen_store_gpr64(rD(ctx->opcode), acc);
7322 tcg_temp_free_i64(acc);
7323 tcg_temp_free_i64(tmp);
7326 static inline void gen_evmwsmi(DisasContext *ctx)
7330 if (unlikely(!ctx->spe_enabled)) {
7331 gen_exception(ctx, POWERPC_EXCP_APU);
7335 t0 = tcg_temp_new_i64();
7336 t1 = tcg_temp_new_i64();
7338 /* t0 := rA; t1 := rB */
7339 #if defined(TARGET_PPC64)
7340 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7341 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7343 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7344 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7347 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7349 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7351 tcg_temp_free_i64(t0);
7352 tcg_temp_free_i64(t1);
7355 static inline void gen_evmwsmia(DisasContext *ctx)
7359 gen_evmwsmi(ctx); /* rD := rA * rB */
7361 tmp = tcg_temp_new_i64();
7364 gen_load_gpr64(tmp, rD(ctx->opcode));
7365 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7367 tcg_temp_free_i64(tmp);
7370 static inline void gen_evmwsmiaa(DisasContext *ctx)
7372 TCGv_i64 acc;
7373 TCGv_i64 tmp;
7375 gen_evmwsmi(ctx); /* rD := rA * rB */
7377 acc = tcg_temp_new_i64();
7378 tmp = tcg_temp_new_i64();
7381 gen_load_gpr64(tmp, rD(ctx->opcode));
7384 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7386 /* acc := tmp + acc */
7387 tcg_gen_add_i64(acc, acc, tmp);
7390 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7393 gen_store_gpr64(rD(ctx->opcode), acc);
7395 tcg_temp_free_i64(acc);
7396 tcg_temp_free_i64(tmp);
7399 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
7400 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
7401 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
7402 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
7403 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
7404 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
7405 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
7406 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
7407 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE);
7408 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
7409 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
7410 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
7411 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
7412 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
7413 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
7414 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
7415 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
7416 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
7417 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
7418 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
7419 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
7420 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
7421 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
7422 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
7423 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
7424 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
7425 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
7426 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
7427 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
7429 /* SPE loads and stores */
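/* The immediate-indexed SPE forms compute EA = (rA|0) + (UIMM << sh), where
 * the UIMM comes from the rB field and sh is log2 of the access size. */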
7430 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7432 target_ulong uimm = rB(ctx->opcode);
7434 if (rA(ctx->opcode) == 0) {
7435 tcg_gen_movi_tl(EA, uimm << sh);
7437 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7438 #if defined(TARGET_PPC64)
7439 if (!ctx->sf_mode) {
7440 tcg_gen_ext32u_tl(EA, EA);
7446 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
7448 #if defined(TARGET_PPC64)
7449 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7451 TCGv_i64 t0 = tcg_temp_new_i64();
7452 gen_qemu_ld64(ctx, t0, addr);
7453 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
7454 tcg_gen_shri_i64(t0, t0, 32);
7455 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
7456 tcg_temp_free_i64(t0);
7460 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
7462 #if defined(TARGET_PPC64)
7463 TCGv t0 = tcg_temp_new();
7464 gen_qemu_ld32u(ctx, t0, addr);
7465 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7466 gen_addr_add(ctx, addr, addr, 4);
7467 gen_qemu_ld32u(ctx, t0, addr);
7468 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7471 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7472 gen_addr_add(ctx, addr, addr, 4);
7473 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7477 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
7479 TCGv t0 = tcg_temp_new();
7480 #if defined(TARGET_PPC64)
7481 gen_qemu_ld16u(ctx, t0, addr);
7482 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7483 gen_addr_add(ctx, addr, addr, 2);
7484 gen_qemu_ld16u(ctx, t0, addr);
7485 tcg_gen_shli_tl(t0, t0, 32);
7486 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7487 gen_addr_add(ctx, addr, addr, 2);
7488 gen_qemu_ld16u(ctx, t0, addr);
7489 tcg_gen_shli_tl(t0, t0, 16);
7490 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7491 gen_addr_add(ctx, addr, addr, 2);
7492 gen_qemu_ld16u(ctx, t0, addr);
7493 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7495 gen_qemu_ld16u(ctx, t0, addr);
7496 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7497 gen_addr_add(ctx, addr, addr, 2);
7498 gen_qemu_ld16u(ctx, t0, addr);
7499 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7500 gen_addr_add(ctx, addr, addr, 2);
7501 gen_qemu_ld16u(ctx, t0, addr);
7502 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7503 gen_addr_add(ctx, addr, addr, 2);
7504 gen_qemu_ld16u(ctx, t0, addr);
7505 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7510 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
7512 TCGv t0 = tcg_temp_new();
7513 gen_qemu_ld16u(ctx, t0, addr);
7514 #if defined(TARGET_PPC64)
7515 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7516 tcg_gen_shli_tl(t0, t0, 16);
7517 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7519 tcg_gen_shli_tl(t0, t0, 16);
7520 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7521 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7526 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
7528 TCGv t0 = tcg_temp_new();
7529 gen_qemu_ld16u(ctx, t0, addr);
7530 #if defined(TARGET_PPC64)
7531 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7532 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7534 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7535 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7540 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
7542 TCGv t0 = tcg_temp_new();
7543 gen_qemu_ld16s(ctx, t0, addr);
7544 #if defined(TARGET_PPC64)
7545 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7546 tcg_gen_ext32u_tl(t0, t0);
7547 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7549 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7550 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7555 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
7557 TCGv t0 = tcg_temp_new();
7558 #if defined(TARGET_PPC64)
7559 gen_qemu_ld16u(ctx, t0, addr);
7560 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7561 gen_addr_add(ctx, addr, addr, 2);
7562 gen_qemu_ld16u(ctx, t0, addr);
7563 tcg_gen_shli_tl(t0, t0, 16);
7564 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7566 gen_qemu_ld16u(ctx, t0, addr);
7567 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7568 gen_addr_add(ctx, addr, addr, 2);
7569 gen_qemu_ld16u(ctx, t0, addr);
7570 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7575 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
7577 #if defined(TARGET_PPC64)
7578 TCGv t0 = tcg_temp_new();
7579 gen_qemu_ld16u(ctx, t0, addr);
7580 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7581 gen_addr_add(ctx, addr, addr, 2);
7582 gen_qemu_ld16u(ctx, t0, addr);
7583 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7586 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7587 gen_addr_add(ctx, addr, addr, 2);
7588 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7592 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
7594 #if defined(TARGET_PPC64)
7595 TCGv t0 = tcg_temp_new();
7596 gen_qemu_ld16s(ctx, t0, addr);
7597 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7598 gen_addr_add(ctx, addr, addr, 2);
7599 gen_qemu_ld16s(ctx, t0, addr);
7600 tcg_gen_ext32u_tl(t0, t0);
7601 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7604 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7605 gen_addr_add(ctx, addr, addr, 2);
7606 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7610 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
7612 TCGv t0 = tcg_temp_new();
7613 gen_qemu_ld32u(ctx, t0, addr);
7614 #if defined(TARGET_PPC64)
7615 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7616 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7618 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7619 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7624 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
7626 TCGv t0 = tcg_temp_new();
7627 #if defined(TARGET_PPC64)
7628 gen_qemu_ld16u(ctx, t0, addr);
7629 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7630 tcg_gen_shli_tl(t0, t0, 32);
7631 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7632 gen_addr_add(ctx, addr, addr, 2);
7633 gen_qemu_ld16u(ctx, t0, addr);
7634 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7635 tcg_gen_shli_tl(t0, t0, 16);
7636 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7638 gen_qemu_ld16u(ctx, t0, addr);
7639 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7640 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7641 gen_addr_add(ctx, addr, addr, 2);
7642 gen_qemu_ld16u(ctx, t0, addr);
7643 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7644 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7649 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
7651 #if defined(TARGET_PPC64)
7652 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7654 TCGv_i64 t0 = tcg_temp_new_i64();
7655 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
7656 gen_qemu_st64(ctx, t0, addr);
7657 tcg_temp_free_i64(t0);
7661 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
7663 #if defined(TARGET_PPC64)
7664 TCGv t0 = tcg_temp_new();
7665 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7666 gen_qemu_st32(ctx, t0, addr);
7669 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7671 gen_addr_add(ctx, addr, addr, 4);
7672 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7675 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
7677 TCGv t0 = tcg_temp_new();
7678 #if defined(TARGET_PPC64)
7679 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7681 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7683 gen_qemu_st16(ctx, t0, addr);
7684 gen_addr_add(ctx, addr, addr, 2);
7685 #if defined(TARGET_PPC64)
7686 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7687 gen_qemu_st16(ctx, t0, addr);
7689 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7691 gen_addr_add(ctx, addr, addr, 2);
7692 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7693 gen_qemu_st16(ctx, t0, addr);
7695 gen_addr_add(ctx, addr, addr, 2);
7696 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7699 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
7701 TCGv t0 = tcg_temp_new();
7702 #if defined(TARGET_PPC64)
7703 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7705 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7707 gen_qemu_st16(ctx, t0, addr);
7708 gen_addr_add(ctx, addr, addr, 2);
7709 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7710 gen_qemu_st16(ctx, t0, addr);
7714 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7716 #if defined(TARGET_PPC64)
7717 TCGv t0 = tcg_temp_new();
7718 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7719 gen_qemu_st16(ctx, t0, addr);
7722 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7724 gen_addr_add(ctx, addr, addr, 2);
7725 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7728 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7730 #if defined(TARGET_PPC64)
7731 TCGv t0 = tcg_temp_new();
7732 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7733 gen_qemu_st32(ctx, t0, addr);
7736 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7740 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7742 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
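/* GEN_SPEOP_LDST wraps each gen_op_* above into a handler: the Rc bit of
 * the opcode picks immediate-indexed vs. register-indexed addressing, and
 * sh is the scale applied to the immediate. */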
7745 #define GEN_SPEOP_LDST(name, opc2, sh) \
7746 static void glue(gen_, name)(DisasContext *ctx) \
7749 if (unlikely(!ctx->spe_enabled)) { \
7750 gen_exception(ctx, POWERPC_EXCP_APU); \
7753 gen_set_access_type(ctx, ACCESS_INT); \
7754 t0 = tcg_temp_new(); \
7755 if (Rc(ctx->opcode)) { \
7756 gen_addr_spe_imm_index(ctx, t0, sh); \
7758 gen_addr_reg_index(ctx, t0); \
7760 gen_op_##name(ctx, t0); \
7761 tcg_temp_free(t0); \
7764 GEN_SPEOP_LDST(evldd, 0x00, 3);
7765 GEN_SPEOP_LDST(evldw, 0x01, 3);
7766 GEN_SPEOP_LDST(evldh, 0x02, 3);
7767 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7768 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7769 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7770 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7771 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7772 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7773 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7774 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7776 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7777 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7778 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7779 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7780 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7781 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7782 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
7784 /* Multiply and add - TODO */
7786 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7787 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7788 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7789 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7790 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7791 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7792 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7793 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7794 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7795 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7796 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7797 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7799 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7800 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7801 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7802 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7803 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7804 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7805 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7806 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7807 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7808 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7809 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7810 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7812 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7813 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7814 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7815 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7816 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7818 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7819 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7820 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7821 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7822 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7823 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7824 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7825 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7826 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7827 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7828 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7829 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7831 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7832 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7833 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7834 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7836 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7837 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7838 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7839 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7840 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7841 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7842 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7843 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7844 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7845 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7846 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7847 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7849 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7850 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7851 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7852 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7853 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7856 /*** SPE floating-point extension ***/
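/* Two variants of each wrapper follow: on 64-bit targets the SPE operands
 * sit directly in the 64-bit GPRs, while on 32-bit targets they are
 * marshalled through gen_load_gpr64/gen_store_gpr64.  The _M_N suffix of
 * the CONV macros gives the result and source widths in bits. */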
7857 #if defined(TARGET_PPC64)
7858 #define GEN_SPEFPUOP_CONV_32_32(name) \
7859 static inline void gen_##name(DisasContext *ctx) \
7863 t0 = tcg_temp_new_i32(); \
7864 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7865 gen_helper_##name(t0, t0); \
7866 t1 = tcg_temp_new(); \
7867 tcg_gen_extu_i32_tl(t1, t0); \
7868 tcg_temp_free_i32(t0); \
7869 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7870 0xFFFFFFFF00000000ULL); \
7871 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7872 tcg_temp_free(t1); \
7874 #define GEN_SPEFPUOP_CONV_32_64(name) \
7875 static inline void gen_##name(DisasContext *ctx) \
7879 t0 = tcg_temp_new_i32(); \
7880 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7881 t1 = tcg_temp_new(); \
7882 tcg_gen_extu_i32_tl(t1, t0); \
7883 tcg_temp_free_i32(t0); \
7884 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7885 0xFFFFFFFF00000000ULL); \
7886 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7887 tcg_temp_free(t1); \
7889 #define GEN_SPEFPUOP_CONV_64_32(name) \
7890 static inline void gen_##name(DisasContext *ctx) \
7892 TCGv_i32 t0 = tcg_temp_new_i32(); \
7893 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7894 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7895 tcg_temp_free_i32(t0); \
7897 #define GEN_SPEFPUOP_CONV_64_64(name) \
7898 static inline void gen_##name(DisasContext *ctx) \
7900 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7902 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7903 static inline void gen_##name(DisasContext *ctx) \
7907 if (unlikely(!ctx->spe_enabled)) { \
7908 gen_exception(ctx, POWERPC_EXCP_APU); \
7911 t0 = tcg_temp_new_i32(); \
7912 t1 = tcg_temp_new_i32(); \
7913 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7914 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7915 gen_helper_##name(t0, t0, t1); \
7916 tcg_temp_free_i32(t1); \
7917 t2 = tcg_temp_new(); \
7918 tcg_gen_extu_i32_tl(t2, t0); \
7919 tcg_temp_free_i32(t0); \
7920 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7921 0xFFFFFFFF00000000ULL); \
7922 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7923 tcg_temp_free(t2); \
7925 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7926 static inline void gen_##name(DisasContext *ctx) \
7928 if (unlikely(!ctx->spe_enabled)) { \
7929 gen_exception(ctx, POWERPC_EXCP_APU); \
7932 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7933 cpu_gpr[rB(ctx->opcode)]); \
7935 #define GEN_SPEFPUOP_COMP_32(name) \
7936 static inline void gen_##name(DisasContext *ctx) \
7939 if (unlikely(!ctx->spe_enabled)) { \
7940 gen_exception(ctx, POWERPC_EXCP_APU); \
7943 t0 = tcg_temp_new_i32(); \
7944 t1 = tcg_temp_new_i32(); \
7945 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7946 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7947 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7948 tcg_temp_free_i32(t0); \
7949 tcg_temp_free_i32(t1); \
7951 #define GEN_SPEFPUOP_COMP_64(name) \
7952 static inline void gen_##name(DisasContext *ctx) \
7954 if (unlikely(!ctx->spe_enabled)) { \
7955 gen_exception(ctx, POWERPC_EXCP_APU); \
7958 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7959 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7962 #define GEN_SPEFPUOP_CONV_32_32(name) \
7963 static inline void gen_##name(DisasContext *ctx) \
7965 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7967 #define GEN_SPEFPUOP_CONV_32_64(name) \
7968 static inline void gen_##name(DisasContext *ctx) \
7970 TCGv_i64 t0 = tcg_temp_new_i64(); \
7971 gen_load_gpr64(t0, rB(ctx->opcode)); \
7972 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7973 tcg_temp_free_i64(t0); \
7975 #define GEN_SPEFPUOP_CONV_64_32(name) \
7976 static inline void gen_##name(DisasContext *ctx) \
7978 TCGv_i64 t0 = tcg_temp_new_i64(); \
7979 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7980 gen_store_gpr64(rD(ctx->opcode), t0); \
7981 tcg_temp_free_i64(t0); \
7983 #define GEN_SPEFPUOP_CONV_64_64(name) \
7984 static inline void gen_##name(DisasContext *ctx) \
7986 TCGv_i64 t0 = tcg_temp_new_i64(); \
7987 gen_load_gpr64(t0, rB(ctx->opcode)); \
7988 gen_helper_##name(t0, t0); \
7989 gen_store_gpr64(rD(ctx->opcode), t0); \
7990 tcg_temp_free_i64(t0); \
7992 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7993 static inline void gen_##name(DisasContext *ctx) \
7995 if (unlikely(!ctx->spe_enabled)) { \
7996 gen_exception(ctx, POWERPC_EXCP_APU); \
7999 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
8000 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8002 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8003 static inline void gen_##name(DisasContext *ctx) \
8006 if (unlikely(!ctx->spe_enabled)) { \
8007 gen_exception(ctx, POWERPC_EXCP_APU); \
8010 t0 = tcg_temp_new_i64(); \
8011 t1 = tcg_temp_new_i64(); \
8012 gen_load_gpr64(t0, rA(ctx->opcode)); \
8013 gen_load_gpr64(t1, rB(ctx->opcode)); \
8014 gen_helper_##name(t0, t0, t1); \
8015 gen_store_gpr64(rD(ctx->opcode), t0); \
8016 tcg_temp_free_i64(t0); \
8017 tcg_temp_free_i64(t1); \
8019 #define GEN_SPEFPUOP_COMP_32(name) \
8020 static inline void gen_##name(DisasContext *ctx) \
8022 if (unlikely(!ctx->spe_enabled)) { \
8023 gen_exception(ctx, POWERPC_EXCP_APU); \
8026 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
8027 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8029 #define GEN_SPEFPUOP_COMP_64(name) \
8030 static inline void gen_##name(DisasContext *ctx) \
8033 if (unlikely(!ctx->spe_enabled)) { \
8034 gen_exception(ctx, POWERPC_EXCP_APU); \
8037 t0 = tcg_temp_new_i64(); \
8038 t1 = tcg_temp_new_i64(); \
8039 gen_load_gpr64(t0, rA(ctx->opcode)); \
8040 gen_load_gpr64(t1, rB(ctx->opcode)); \
8041 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
8042 tcg_temp_free_i64(t0); \
8043 tcg_temp_free_i64(t1); \
8047 /* Single precision floating-point vector operations */
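/* Vector single-precision arithmetic and conversions are done in helpers on
 * the whole 64-bit element pair; only the sign-bit operations (abs, nabs,
 * neg) are generated inline as masks on both 32-bit elements. */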
8049 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
8050 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
8051 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
8052 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
8053 static inline void gen_evfsabs(DisasContext *ctx)
8055 if (unlikely(!ctx->spe_enabled)) {
8056 gen_exception(ctx, POWERPC_EXCP_APU);
8059 #if defined(TARGET_PPC64)
8060 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
8062 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
8063 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8066 static inline void gen_evfsnabs(DisasContext *ctx)
8068 if (unlikely(!ctx->spe_enabled)) {
8069 gen_exception(ctx, POWERPC_EXCP_APU);
8072 #if defined(TARGET_PPC64)
8073 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8075 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8076 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8079 static inline void gen_evfsneg(DisasContext *ctx)
8081 if (unlikely(!ctx->spe_enabled)) {
8082 gen_exception(ctx, POWERPC_EXCP_APU);
8085 #if defined(TARGET_PPC64)
8086 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8088 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8089 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8094 GEN_SPEFPUOP_CONV_64_64(evfscfui);
8095 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8096 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8097 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8098 GEN_SPEFPUOP_CONV_64_64(evfsctui);
8099 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8100 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8101 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8102 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8103 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8106 GEN_SPEFPUOP_COMP_64(evfscmpgt);
8107 GEN_SPEFPUOP_COMP_64(evfscmplt);
8108 GEN_SPEFPUOP_COMP_64(evfscmpeq);
8109 GEN_SPEFPUOP_COMP_64(evfststgt);
8110 GEN_SPEFPUOP_COMP_64(evfststlt);
8111 GEN_SPEFPUOP_COMP_64(evfststeq);
8113 /* Opcode definitions */
8114 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8115 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8116 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8117 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8118 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8119 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8120 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8121 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8122 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8123 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8124 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8125 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8126 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8127 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8129 /* Single precision floating-point operations */
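/* Scalar single-precision (efs*) operations act only on the low 32-bit
 * element of each register. */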
8131 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
8132 GEN_SPEFPUOP_ARITH2_32_32(efssub);
8133 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
8134 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
8135 static inline void gen_efsabs(DisasContext *ctx)
8137 if (unlikely(!ctx->spe_enabled)) {
8138 gen_exception(ctx, POWERPC_EXCP_APU);
8141 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
8143 static inline void gen_efsnabs(DisasContext *ctx)
8145 if (unlikely(!ctx->spe_enabled)) {
8146 gen_exception(ctx, POWERPC_EXCP_APU);
8149 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8151 static inline void gen_efsneg(DisasContext *ctx)
8153 if (unlikely(!ctx->spe_enabled)) {
8154 gen_exception(ctx, POWERPC_EXCP_APU);
8157 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8161 GEN_SPEFPUOP_CONV_32_32(efscfui);
8162 GEN_SPEFPUOP_CONV_32_32(efscfsi);
8163 GEN_SPEFPUOP_CONV_32_32(efscfuf);
8164 GEN_SPEFPUOP_CONV_32_32(efscfsf);
8165 GEN_SPEFPUOP_CONV_32_32(efsctui);
8166 GEN_SPEFPUOP_CONV_32_32(efsctsi);
8167 GEN_SPEFPUOP_CONV_32_32(efsctuf);
8168 GEN_SPEFPUOP_CONV_32_32(efsctsf);
8169 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8170 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8171 GEN_SPEFPUOP_CONV_32_64(efscfd);
8174 GEN_SPEFPUOP_COMP_32(efscmpgt);
8175 GEN_SPEFPUOP_COMP_32(efscmplt);
8176 GEN_SPEFPUOP_COMP_32(efscmpeq);
8177 GEN_SPEFPUOP_COMP_32(efststgt);
8178 GEN_SPEFPUOP_COMP_32(efststlt);
8179 GEN_SPEFPUOP_COMP_32(efststeq);
8181 /* Opcode definitions */
8182 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8183 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8184 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8185 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8186 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8187 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8188 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8189 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8190 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8191 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8192 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8193 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8194 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8195 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8197 /* Double precision floating-point operations */
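/* Double-precision (efd*) operations use the full 64-bit register.  On
 * 32-bit targets the sign bit lives in cpu_gprh, so abs/nabs/neg below copy
 * the low word unchanged and adjust only the high word. */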
8199 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
8200 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
8201 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
8202 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
8203 static inline void gen_efdabs(DisasContext *ctx)
8205 if (unlikely(!ctx->spe_enabled)) {
8206 gen_exception(ctx, POWERPC_EXCP_APU);
8209 #if defined(TARGET_PPC64)
8210 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
8212 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8213 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8216 static inline void gen_efdnabs(DisasContext *ctx)
8218 if (unlikely(!ctx->spe_enabled)) {
8219 gen_exception(ctx, POWERPC_EXCP_APU);
8222 #if defined(TARGET_PPC64)
8223 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8225 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8226 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8229 static inline void gen_efdneg(DisasContext *ctx)
8231 if (unlikely(!ctx->spe_enabled)) {
8232 gen_exception(ctx, POWERPC_EXCP_APU);
8235 #if defined(TARGET_PPC64)
8236 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8238 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8239 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8244 GEN_SPEFPUOP_CONV_64_32(efdcfui);
8245 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8246 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8247 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8248 GEN_SPEFPUOP_CONV_32_64(efdctui);
8249 GEN_SPEFPUOP_CONV_32_64(efdctsi);
8250 GEN_SPEFPUOP_CONV_32_64(efdctuf);
8251 GEN_SPEFPUOP_CONV_32_64(efdctsf);
8252 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8253 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8254 GEN_SPEFPUOP_CONV_64_32(efdcfs);
8255 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8256 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8257 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8258 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8261 GEN_SPEFPUOP_COMP_64(efdcmpgt);
8262 GEN_SPEFPUOP_COMP_64(efdcmplt);
8263 GEN_SPEFPUOP_COMP_64(efdcmpeq);
8264 GEN_SPEFPUOP_COMP_64(efdtstgt);
8265 GEN_SPEFPUOP_COMP_64(efdtstlt);
8266 GEN_SPEFPUOP_COMP_64(efdtsteq);
8268 /* Opcode definitions */
8269 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8270 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8271 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8272 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8273 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8274 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8275 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8276 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8277 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8278 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8279 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8280 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8281 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8282 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8283 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8284 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
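/* Master opcode table.  Each entry gives the handler, the primary opcode,
 * the two extended opcode fields, a mask of bits that must be zero for the
 * encoding to be valid, and the instruction-set flag the CPU must provide
 * for the handler to be registered. */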
8286 static opcode_t opcodes[] = {
8287 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8288 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8289 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8290 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8291 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8292 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8293 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8294 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8295 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8296 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8297 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8298 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8299 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8300 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8301 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8302 #if defined(TARGET_PPC64)
8303 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8305 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8306 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8307 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8308 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8309 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8310 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8311 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8312 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8313 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8314 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8315 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8316 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8317 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8318 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8319 #if defined(TARGET_PPC64)
8320 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8321 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8323 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8324 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8325 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8326 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8327 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8328 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8329 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8330 #if defined(TARGET_PPC64)
8331 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8332 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8333 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8334 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8335 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8337 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8338 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8339 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8340 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8341 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8342 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8343 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8344 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8345 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8346 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8347 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8348 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8349 #if defined(TARGET_PPC64)
8350 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8351 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8352 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8354 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8355 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8356 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8357 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8358 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8359 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8360 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8361 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8362 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8363 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8364 #if defined(TARGET_PPC64)
8365 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8366 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8368 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8369 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8370 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8371 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8372 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8373 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8374 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8375 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8376 #if defined(TARGET_PPC64)
8377 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8378 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8380 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8381 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8382 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8383 #if defined(TARGET_PPC64)
8384 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8385 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8387 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8388 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8389 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8390 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8391 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8392 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8393 #if defined(TARGET_PPC64)
8394 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8396 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8397 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8398 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8399 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8400 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8401 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8402 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8403 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ),
8404 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT),
8405 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8406 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8407 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8408 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8409 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8410 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8411 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8412 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8413 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8414 #if defined(TARGET_PPC64)
8415 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8416 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8417 PPC_SEGMENT_64B),
8418 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8419 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8420 PPC_SEGMENT_64B),
8421 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8422 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8423 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8424 #endif
8425 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8426 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8427 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8428 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8429 #if defined(TARGET_PPC64)
8430 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8431 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8432 #endif
8433 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8434 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8435 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8436 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8437 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8438 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8439 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8440 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8441 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8442 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8443 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8444 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8445 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8446 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8447 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8448 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8449 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8450 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8451 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8452 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8453 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8454 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8455 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8456 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8457 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8458 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8459 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8460 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8461 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8462 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8463 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8464 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8465 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8466 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8467 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8468 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8469 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8470 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8471 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8472 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8473 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8474 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8475 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8476 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8477 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8478 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8479 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8480 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8481 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8482 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8483 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8484 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8485 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8486 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8487 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8488 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8489 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8490 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8491 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8492 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8493 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8494 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8495 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8496 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8497 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8498 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8499 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8500 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8501 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8502 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8503 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8504 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8505 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8506 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8507 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8508 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8509 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8510 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8511 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8512 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8513 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8514 PPC_NONE, PPC2_BOOKE206),
8515 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8516 PPC_NONE, PPC2_BOOKE206),
8517 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8518 PPC_NONE, PPC2_BOOKE206),
8519 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8520 PPC_NONE, PPC2_BOOKE206),
8521 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8522 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8523 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8524 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8525 PPC_BOOKE, PPC2_BOOKE206),
8526 GEN_HANDLER_E(msync, 0x1F, 0x16, 0x12, 0x03FFF801,
8527 PPC_BOOKE, PPC2_BOOKE206),
8528 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8529 PPC_BOOKE, PPC2_BOOKE206),
8530 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8531 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8532 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8533 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8534 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8535 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8536 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8537 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8538 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8539 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
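/* From here to the end of the table, entries are produced by re-expanding the
 * same helper macros that defined the gen_* routines earlier in this file, so
 * each instruction family stays in sync with its translation code.  The
 * GEN_HANDLER arguments are: mnemonic, primary opcode (opc1), extended opcode
 * fields (opc2/opc3, 0xFF when unused), a mask of opcode bits that must be
 * zero in a valid encoding, and the insns_flags bit a CPU must advertise for
 * the entry to be registered (GEN_HANDLER_E adds a second, insns_flags2 bit).
 * As a rough sketch only -- see the GEN_OPCODE/opcode_t definitions earlier in
 * this file for the exact layout -- GEN_HANDLER(foo, op1, op2, op3, inval, typ)
 * expands to something like:
 *     { .opc1 = op1, .opc2 = op2, .opc3 = op3,
 *       .handler = { .inval = inval, .type = typ,
 *                    .handler = &gen_foo, .oname = "foo" } },
 */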
8541 #undef GEN_INT_ARITH_ADD
8542 #undef GEN_INT_ARITH_ADD_CONST
8543 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8544 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8545 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8546 add_ca, compute_ca, compute_ov) \
8547 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8548 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8549 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8550 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8551 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8552 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8553 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8554 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8555 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8556 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8557 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
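/* Note the opc3 pattern used throughout the integer arithmetic groups: the
 * 'o' (overflow-enable, OE=1) variant of an instruction is the base opc3 with
 * bit 0x10 set, which is why each pair above and below sits at opc3 and
 * opc3 | 0x10. */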
8559 #undef GEN_INT_ARITH_DIVW
8560 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8561 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8562 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8563 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8564 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8565 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8567 #if defined(TARGET_PPC64)
8568 #undef GEN_INT_ARITH_DIVD
8569 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8570 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8571 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8572 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8573 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8574 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8576 #undef GEN_INT_ARITH_MUL_HELPER
8577 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8578 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8579 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8580 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8581 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8582 #endif
8584 #undef GEN_INT_ARITH_SUBF
8585 #undef GEN_INT_ARITH_SUBF_CONST
8586 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8587 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8588 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8589 add_ca, compute_ca, compute_ov) \
8590 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8591 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8592 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8593 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8594 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8595 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8596 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8597 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8598 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8599 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8600 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8604 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8605 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8606 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8607 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8608 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8609 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8610 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8611 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8612 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8613 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8614 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8615 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8616 #if defined(TARGET_PPC64)
8617 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8618 #endif
8620 #if defined(TARGET_PPC64)
8623 #define GEN_PPC64_R2(name, opc1, opc2) \
8624 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8625 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8626 PPC_64B)
8627 #define GEN_PPC64_R4(name, opc1, opc2) \
8628 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8629 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8630 PPC_64B), \
8631 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8632 PPC_64B), \
8633 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8634 PPC_64B)
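/* The 64-bit rotate instructions spread their 6-bit shift and mask fields
 * across the opcode bits that the decoder uses as opc2/opc3, so a single
 * mnemonic occupies two (rldcl/rldcr) or four (rldicl/rldicr/rldic/rldimi)
 * adjacent table slots; GEN_PPC64_R2/R4 register all of them against the
 * same translation routine. */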
8635 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8636 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8637 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8638 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8639 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8640 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8641 #endif
8643 #undef _GEN_FLOAT_ACB
8644 #undef GEN_FLOAT_ACB
8645 #undef _GEN_FLOAT_AB
8646 #undef GEN_FLOAT_AB
8647 #undef _GEN_FLOAT_AC
8648 #undef GEN_FLOAT_AC
8649 #undef GEN_FLOAT_B
8650 #undef GEN_FLOAT_BS
8651 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
8652 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
8653 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
8654 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
8655 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
8656 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8657 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8658 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
8659 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8660 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8661 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8662 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8663 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
8664 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8665 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8666 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
8667 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
8668 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
8669 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
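/* Floating-point helpers: the A/B/C letters name the operand fields involved
 * (frA, frB, frC), so _ACB covers the fused multiply-add forms and _AB/_AC
 * the two-operand forms.  GEN_FLOAT_ACB/AB/AC each register both the
 * double-precision entry (primary opcode 0x3F) and its single-precision twin
 * (0x3B); GEN_FLOAT_B and GEN_FLOAT_BS register one entry each. */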
8671 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8672 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8673 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8674 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8675 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8676 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8677 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8678 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8679 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8680 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8681 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8682 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8683 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8684 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8685 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8686 #if defined(TARGET_PPC64)
8687 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8688 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8689 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
8690 #endif
8691 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8692 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8693 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8694 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
8695 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT),
8696 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT),
8697 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT),
8704 #define GEN_LD(name, ldop, opc, type) \
8705 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8706 #define GEN_LDU(name, ldop, opc, type) \
8707 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8708 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8709 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8710 #define GEN_LDX(name, ldop, opc2, opc3, type) \
8711 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8712 #define GEN_LDS(name, ldop, op, type) \
8713 GEN_LD(name, ldop, op | 0x20, type) \
8714 GEN_LDU(name, ldop, op | 0x21, type) \
8715 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8716 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
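/* One GEN_LDS line expands to the four classic flavours of a PowerPC load:
 * the D-form (e.g. lbz, primary opcode op | 0x20), its update form (lbzu,
 * op | 0x21), and the X-form indexed versions lbzx/lbzux under primary
 * opcode 0x1F. */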
8718 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8719 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8720 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8721 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8722 #if defined(TARGET_PPC64)
8723 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8724 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8725 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8726 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
8727 #endif
8728 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8729 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8736 #define GEN_ST(name, stop, opc, type) \
8737 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8738 #define GEN_STU(name, stop, opc, type) \
8739 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8740 #define GEN_STUX(name, stop, opc2, opc3, type) \
8741 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8742 #define GEN_STX(name, stop, opc2, opc3, type) \
8743 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8744 #define GEN_STS(name, stop, op, type) \
8745 GEN_ST(name, stop, op | 0x20, type) \
8746 GEN_STU(name, stop, op | 0x21, type) \
8747 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8748 GEN_STX(name, stop, 0x17, op | 0x00, type)
8750 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8751 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8752 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8753 #if defined(TARGET_PPC64)
8754 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8755 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
8756 #endif
8757 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8758 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8765 #define GEN_LDF(name, ldop, opc, type) \
8766 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8767 #define GEN_LDUF(name, ldop, opc, type) \
8768 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8769 #define GEN_LDUXF(name, ldop, opc, type) \
8770 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8771 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
8772 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8773 #define GEN_LDFS(name, ldop, op, type) \
8774 GEN_LDF(name, ldop, op | 0x20, type) \
8775 GEN_LDUF(name, ldop, op | 0x21, type) \
8776 GEN_LDUXF(name, ldop, op | 0x01, type) \
8777 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8779 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8780 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8787 #define GEN_STF(name, stop, opc, type) \
8788 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8789 #define GEN_STUF(name, stop, opc, type) \
8790 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8791 #define GEN_STUXF(name, stop, opc, type) \
8792 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8793 #define GEN_STXF(name, stop, opc2, opc3, type) \
8794 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8795 #define GEN_STFS(name, stop, op, type) \
8796 GEN_STF(name, stop, op | 0x20, type) \
8797 GEN_STUF(name, stop, op | 0x21, type) \
8798 GEN_STUXF(name, stop, op | 0x01, type) \
8799 GEN_STXF(name, stop, 0x17, op | 0x00, type)
8801 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8802 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8803 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
8806 #define GEN_CRLOGIC(name, tcg_op, opc) \
8807 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
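/* Condition-register logical ops all live under primary opcode 0x13 with
 * opc3 selecting the boolean operation; the 0x00000001 invalid-bits mask
 * forces the low (reserved) opcode bit to zero. */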
8808 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8809 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8810 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8811 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8812 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8813 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8814 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8815 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8817 #undef GEN_MAC_HANDLER
8818 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8819 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8820 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8821 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8822 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8823 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8824 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8825 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8826 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8827 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8828 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8829 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8830 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8831 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8832 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8833 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8834 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8835 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8836 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8837 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8838 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8839 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8840 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8841 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8842 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8843 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8844 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8845 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8846 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8847 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8848 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8849 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8850 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8851 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8852 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8853 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8854 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8855 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8856 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8857 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8858 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8859 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8860 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8861 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8867 #define GEN_VR_LDX(name, opc2, opc3) \
8868 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8869 #define GEN_VR_STX(name, opc2, opc3) \
8870 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8871 #define GEN_VR_LVE(name, opc2, opc3) \
8872 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8873 #define GEN_VR_STVE(name, opc2, opc3) \
8874 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8875 GEN_VR_LDX(lvx, 0x07, 0x03),
8876 GEN_VR_LDX(lvxl, 0x07, 0x0B),
8877 GEN_VR_LVE(bx, 0x07, 0x00),
8878 GEN_VR_LVE(hx, 0x07, 0x01),
8879 GEN_VR_LVE(wx, 0x07, 0x02),
8880 GEN_VR_STX(svx, 0x07, 0x07),
8881 GEN_VR_STX(svxl, 0x07, 0x0F),
8882 GEN_VR_STVE(bx, 0x07, 0x04),
8883 GEN_VR_STVE(hx, 0x07, 0x05),
8884 GEN_VR_STVE(wx, 0x07, 0x06),
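/* AltiVec memory ops: GEN_VR_LDX/STX cover the full-vector lvx/lvxl and
 * stvx/stvxl forms, while GEN_VR_LVE/STVE register the per-element
 * lvebx/lvehx/lvewx and stvebx/stvehx/stvewx variants. */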
8886 #undef GEN_VX_LOGICAL
8887 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
8888 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8889 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
8890 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
8891 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
8892 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
8893 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
8896 #define GEN_VXFORM(name, opc2, opc3) \
8897 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8898 GEN_VXFORM(vaddubm, 0, 0),
8899 GEN_VXFORM(vadduhm, 0, 1),
8900 GEN_VXFORM(vadduwm, 0, 2),
8901 GEN_VXFORM(vsububm, 0, 16),
8902 GEN_VXFORM(vsubuhm, 0, 17),
8903 GEN_VXFORM(vsubuwm, 0, 18),
8904 GEN_VXFORM(vmaxub, 1, 0),
8905 GEN_VXFORM(vmaxuh, 1, 1),
8906 GEN_VXFORM(vmaxuw, 1, 2),
8907 GEN_VXFORM(vmaxsb, 1, 4),
8908 GEN_VXFORM(vmaxsh, 1, 5),
8909 GEN_VXFORM(vmaxsw, 1, 6),
8910 GEN_VXFORM(vminub, 1, 8),
8911 GEN_VXFORM(vminuh, 1, 9),
8912 GEN_VXFORM(vminuw, 1, 10),
8913 GEN_VXFORM(vminsb, 1, 12),
8914 GEN_VXFORM(vminsh, 1, 13),
8915 GEN_VXFORM(vminsw, 1, 14),
8916 GEN_VXFORM(vavgub, 1, 16),
8917 GEN_VXFORM(vavguh, 1, 17),
8918 GEN_VXFORM(vavguw, 1, 18),
8919 GEN_VXFORM(vavgsb, 1, 20),
8920 GEN_VXFORM(vavgsh, 1, 21),
8921 GEN_VXFORM(vavgsw, 1, 22),
8922 GEN_VXFORM(vmrghb, 6, 0),
8923 GEN_VXFORM(vmrghh, 6, 1),
8924 GEN_VXFORM(vmrghw, 6, 2),
8925 GEN_VXFORM(vmrglb, 6, 4),
8926 GEN_VXFORM(vmrglh, 6, 5),
8927 GEN_VXFORM(vmrglw, 6, 6),
8928 GEN_VXFORM(vmuloub, 4, 0),
8929 GEN_VXFORM(vmulouh, 4, 1),
8930 GEN_VXFORM(vmulosb, 4, 4),
8931 GEN_VXFORM(vmulosh, 4, 5),
8932 GEN_VXFORM(vmuleub, 4, 8),
8933 GEN_VXFORM(vmuleuh, 4, 9),
8934 GEN_VXFORM(vmulesb, 4, 12),
8935 GEN_VXFORM(vmulesh, 4, 13),
8936 GEN_VXFORM(vslb, 2, 4),
8937 GEN_VXFORM(vslh, 2, 5),
8938 GEN_VXFORM(vslw, 2, 6),
8939 GEN_VXFORM(vsrb, 2, 8),
8940 GEN_VXFORM(vsrh, 2, 9),
8941 GEN_VXFORM(vsrw, 2, 10),
8942 GEN_VXFORM(vsrab, 2, 12),
8943 GEN_VXFORM(vsrah, 2, 13),
8944 GEN_VXFORM(vsraw, 2, 14),
8945 GEN_VXFORM(vslo, 6, 16),
8946 GEN_VXFORM(vsro, 6, 17),
8947 GEN_VXFORM(vaddcuw, 0, 6),
8948 GEN_VXFORM(vsubcuw, 0, 22),
8949 GEN_VXFORM(vaddubs, 0, 8),
8950 GEN_VXFORM(vadduhs, 0, 9),
8951 GEN_VXFORM(vadduws, 0, 10),
8952 GEN_VXFORM(vaddsbs, 0, 12),
8953 GEN_VXFORM(vaddshs, 0, 13),
8954 GEN_VXFORM(vaddsws, 0, 14),
8955 GEN_VXFORM(vsububs, 0, 24),
8956 GEN_VXFORM(vsubuhs, 0, 25),
8957 GEN_VXFORM(vsubuws, 0, 26),
8958 GEN_VXFORM(vsubsbs, 0, 28),
8959 GEN_VXFORM(vsubshs, 0, 29),
8960 GEN_VXFORM(vsubsws, 0, 30),
8961 GEN_VXFORM(vrlb, 2, 0),
8962 GEN_VXFORM(vrlh, 2, 1),
8963 GEN_VXFORM(vrlw, 2, 2),
8964 GEN_VXFORM(vsl, 2, 7),
8965 GEN_VXFORM(vsr, 2, 11),
8966 GEN_VXFORM(vpkuhum, 7, 0),
8967 GEN_VXFORM(vpkuwum, 7, 1),
8968 GEN_VXFORM(vpkuhus, 7, 2),
8969 GEN_VXFORM(vpkuwus, 7, 3),
8970 GEN_VXFORM(vpkshus, 7, 4),
8971 GEN_VXFORM(vpkswus, 7, 5),
8972 GEN_VXFORM(vpkshss, 7, 6),
8973 GEN_VXFORM(vpkswss, 7, 7),
8974 GEN_VXFORM(vpkpx, 7, 12),
8975 GEN_VXFORM(vsum4ubs, 4, 24),
8976 GEN_VXFORM(vsum4sbs, 4, 28),
8977 GEN_VXFORM(vsum4shs, 4, 25),
8978 GEN_VXFORM(vsum2sws, 4, 26),
8979 GEN_VXFORM(vsumsws, 4, 30),
8980 GEN_VXFORM(vaddfp, 5, 0),
8981 GEN_VXFORM(vsubfp, 5, 1),
8982 GEN_VXFORM(vmaxfp, 5, 16),
8983 GEN_VXFORM(vminfp, 5, 17),
8987 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
8988 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
8989 #define GEN_VXRFORM(name, opc2, opc3) \
8990 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
8991 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
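/* Each GEN_VXRFORM registers an AltiVec compare twice: the plain form and
 * the record form ("vcmp....") with the VXR Rc bit set in opc3
 * (opc3 | 0x10), which additionally updates CR6 with the all-true/all-false
 * summary. */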
8992 GEN_VXRFORM(vcmpequb, 3, 0)
8993 GEN_VXRFORM(vcmpequh, 3, 1)
8994 GEN_VXRFORM(vcmpequw, 3, 2)
8995 GEN_VXRFORM(vcmpgtsb, 3, 12)
8996 GEN_VXRFORM(vcmpgtsh, 3, 13)
8997 GEN_VXRFORM(vcmpgtsw, 3, 14)
8998 GEN_VXRFORM(vcmpgtub, 3, 8)
8999 GEN_VXRFORM(vcmpgtuh, 3, 9)
9000 GEN_VXRFORM(vcmpgtuw, 3, 10)
9001 GEN_VXRFORM(vcmpeqfp, 3, 3)
9002 GEN_VXRFORM(vcmpgefp, 3, 7)
9003 GEN_VXRFORM(vcmpgtfp, 3, 11)
9004 GEN_VXRFORM(vcmpbfp, 3, 15)
9006 #undef GEN_VXFORM_SIMM
9007 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
9008 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9009 GEN_VXFORM_SIMM(vspltisb, 6, 12),
9010 GEN_VXFORM_SIMM(vspltish, 6, 13),
9011 GEN_VXFORM_SIMM(vspltisw, 6, 14),
9013 #undef GEN_VXFORM_NOA
9014 #define GEN_VXFORM_NOA(name, opc2, opc3) \
9015 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9016 GEN_VXFORM_NOA(vupkhsb, 7, 8),
9017 GEN_VXFORM_NOA(vupkhsh, 7, 9),
9018 GEN_VXFORM_NOA(vupklsb, 7, 10),
9019 GEN_VXFORM_NOA(vupklsh, 7, 11),
9020 GEN_VXFORM_NOA(vupkhpx, 7, 13),
9021 GEN_VXFORM_NOA(vupklpx, 7, 15),
9022 GEN_VXFORM_NOA(vrefp, 5, 4),
9023 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9024 GEN_VXFORM_NOA(vexptefp, 5, 6),
9025 GEN_VXFORM_NOA(vlogefp, 5, 7),
9026 GEN_VXFORM_NOA(vrfim, 5, 8),
9027 GEN_VXFORM_NOA(vrfin, 5, 9),
9028 GEN_VXFORM_NOA(vrfip, 5, 10),
9029 GEN_VXFORM_NOA(vrfiz, 5, 11),
9031 #undef GEN_VXFORM_UIMM
9032 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
9033 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9034 GEN_VXFORM_UIMM(vspltb, 6, 8),
9035 GEN_VXFORM_UIMM(vsplth, 6, 9),
9036 GEN_VXFORM_UIMM(vspltw, 6, 10),
9037 GEN_VXFORM_UIMM(vcfux, 5, 12),
9038 GEN_VXFORM_UIMM(vcfsx, 5, 13),
9039 GEN_VXFORM_UIMM(vctuxs, 5, 14),
9040 GEN_VXFORM_UIMM(vctsxs, 5, 15),
9042 #undef GEN_VAFORM_PAIRED
9043 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9044 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9045 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9046 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9047 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9048 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9049 GEN_VAFORM_PAIRED(vsel, vperm, 21),
9050 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9053 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
9054 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)
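/* SPE entries come in pairs: two mnemonics that differ only in the low
 * opcode bit share one table slot, and the generated handler selects between
 * them at translation time; "speundef" marks a half of the pair that is not
 * a defined instruction. */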
9055 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE),
9056 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE),
9057 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE),
9058 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE),
9059 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE),
9060 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE),
9061 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE),
9062 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE),
9063 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE),
9064 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE),
9065 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE),
9066 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE),
9067 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE),
9068 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE),
9069 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE),
9070 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE),
9071 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE),
9072 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE),
9073 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE),
9074 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE),
9075 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE),
9076 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE),
9077 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE),
9078 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE),
9079 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE),
9080 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE),
9081 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE),
9082 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE),
9083 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE),
9085 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE),
9086 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
9087 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
9088 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE),
9089 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9090 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9091 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9092 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9093 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9094 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9095 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9096 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9097 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9098 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9100 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE),
9101 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
9102 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
9103 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE),
9104 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9105 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9106 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9107 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9108 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9109 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9110 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9111 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9112 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9113 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9115 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
9116 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9117 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
9118 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
9119 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
9120 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9121 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9122 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9123 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9124 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9125 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9126 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9127 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9128 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9129 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9130 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9132 #undef GEN_SPEOP_LDST
9133 #define GEN_SPEOP_LDST(name, opc2, sh) \
9134 GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
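/* The sh argument (log2 of the access size: 3 for evldd, 2 for evlw*, 1 for
 * evlhh*) is only consumed by the earlier, function-generating expansion of
 * GEN_SPEOP_LDST, where it scales the immediate offset; for the opcode table
 * it is ignored and every entry lands under primary opcode 0x04, opc3 0x0C. */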
9135 GEN_SPEOP_LDST(evldd, 0x00, 3),
9136 GEN_SPEOP_LDST(evldw, 0x01, 3),
9137 GEN_SPEOP_LDST(evldh, 0x02, 3),
9138 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9139 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9140 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9141 GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9142 GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9143 GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9144 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9145 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9147 GEN_SPEOP_LDST(evstdd, 0x10, 3),
9148 GEN_SPEOP_LDST(evstdw, 0x11, 3),
9149 GEN_SPEOP_LDST(evstdh, 0x12, 3),
9150 GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9151 GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9152 GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
9153 GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
9156 #include "translate_init.c"
9157 #include "helper_regs.h"
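/* translate_init.c is included here rather than compiled separately: it
 * defines the CPU models and registers the opcodes[] table built above, so
 * it needs the macros and static tables from this translation unit. */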
9159 /*****************************************************************************/
9160 /* Misc PowerPC helpers */
9161 void cpu_dump_state (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
9162 int flags)
9163 {
9164 #define RGPL 4
9165 #define RFPL 4
9167 int i;
9169 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
9170 TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
9171 env->nip, env->lr, env->ctr, env->xer);
9172 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9173 TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9174 env->hflags, env->mmu_idx);
9175 #if !defined(NO_TIMER_DUMP)
9176 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
9177 #if !defined(CONFIG_USER_ONLY)
9178 " DECR %08" PRIu32
9179 #endif
9180 "\n",
9181 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
9182 #if !defined(CONFIG_USER_ONLY)
9183 , cpu_ppc_load_decr(env)
9184 #endif
9185 );
9186 #endif
9187 for (i = 0; i < 32; i++) {
9188 if ((i & (RGPL - 1)) == 0)
9189 cpu_fprintf(f, "GPR%02d", i);
9190 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9191 if ((i & (RGPL - 1)) == (RGPL - 1))
9192 cpu_fprintf(f, "\n");
9193 }
9194 cpu_fprintf(f, "CR ");
9195 for (i = 0; i < 8; i++)
9196 cpu_fprintf(f, "%01x", env->crf[i]);
9197 cpu_fprintf(f, " [");
9198 for (i = 0; i < 8; i++) {
9199 char a = '-';
9200 if (env->crf[i] & 0x08)
9201 a = 'L';
9202 else if (env->crf[i] & 0x04)
9203 a = 'G';
9204 else if (env->crf[i] & 0x02)
9205 a = 'E';
9206 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
9207 }
9208 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
9209 env->reserve_addr);
9210 for (i = 0; i < 32; i++) {
9211 if ((i & (RFPL - 1)) == 0)
9212 cpu_fprintf(f, "FPR%02d", i);
9213 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
9214 if ((i & (RFPL - 1)) == (RFPL - 1))
9215 cpu_fprintf(f, "\n");
9216 }
9217 cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
9218 #if !defined(CONFIG_USER_ONLY)
9219 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9220 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9221 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9222 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9224 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9225 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9226 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9227 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9229 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9230 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9231 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9232 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9234 if (env->excp_model == POWERPC_EXCP_BOOKE) {
9235 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9236 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9237 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9238 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9240 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9241 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9242 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9243 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9245 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9246 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9247 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9248 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9250 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9251 " EPR " TARGET_FMT_lx "\n",
9252 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9253 env->spr[SPR_BOOKE_EPR]);
9256 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9257 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9258 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
9259 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
9261 /*
9262 * IVORs are left out as they are large and do not change often --
9263 * they can be read with "p $ivor0", "p $ivor1", etc.
9264 */
9265 }
9267 switch (env->mmu_model) {
9268 case POWERPC_MMU_32B:
9269 case POWERPC_MMU_601:
9270 case POWERPC_MMU_SOFT_6xx:
9271 case POWERPC_MMU_SOFT_74xx:
9272 #if defined(TARGET_PPC64)
9273 case POWERPC_MMU_620:
9274 case POWERPC_MMU_64B:
9275 #endif
9276 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
9277 break;
9278 case POWERPC_MMU_BOOKE206:
9279 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9280 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9281 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9282 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9284 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9285 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9286 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9287 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9289 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9290 " TLB1CFG " TARGET_FMT_lx "\n",
9291 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9292 env->spr[SPR_BOOKE_TLB1CFG]);
9293 break;
9294 default:
9295 break;
9296 }
9297 #endif
9299 #undef RGPL
9300 #undef RFPL
9301 }
9303 void cpu_dump_statistics (CPUState *env, FILE*f, fprintf_function cpu_fprintf,
9304 int flags)
9305 {
9306 #if defined(DO_PPC_STATISTICS)
9307 opc_handler_t **t1, **t2, **t3, *handler;
9308 int op1, op2, op3;
9310 t1 = env->opcodes;
9311 for (op1 = 0; op1 < 64; op1++) {
9312 handler = t1[op1];
9313 if (is_indirect_opcode(handler)) {
9314 t2 = ind_table(handler);
9315 for (op2 = 0; op2 < 32; op2++) {
9316 handler = t2[op2];
9317 if (is_indirect_opcode(handler)) {
9318 t3 = ind_table(handler);
9319 for (op3 = 0; op3 < 32; op3++) {
9320 handler = t3[op3];
9321 if (handler->count == 0)
9322 continue;
9323 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9324 "%016" PRIx64 " %" PRId64 "\n",
9325 op1, op2, op3, op1, (op3 << 5) | op2,
9326 handler->oname,
9327 handler->count, handler->count);
9328 }
9329 } else {
9330 if (handler->count == 0)
9331 continue;
9332 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9333 "%016" PRIx64 " %" PRId64 "\n",
9334 op1, op2, op1, op2, handler->oname,
9335 handler->count, handler->count);
9336 }
9337 }
9338 } else {
9339 if (handler->count == 0)
9340 continue;
9341 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9342 " %" PRId64 "\n",
9343 op1, op1, handler->oname,
9344 handler->count, handler->count);
9345 }
9346 }
9347 #endif
9348 }
9350 /*****************************************************************************/
9351 static inline void gen_intermediate_code_internal(CPUState *env,
9352 TranslationBlock *tb,
9353 int search_pc)
9354 {
9355 DisasContext ctx, *ctxp = &ctx;
9356 opc_handler_t **table, *handler;
9357 target_ulong pc_start;
9358 uint16_t *gen_opc_end;
9359 CPUBreakpoint *bp;
9360 int j, lj = -1;
9361 int num_insns;
9362 int max_insns;
9364 pc_start = tb->pc;
9365 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
9366 ctx.nip = tb->pc;
9367 ctx.tb = tb;
9368 ctx.exception = POWERPC_EXCP_NONE;
9369 ctx.spr_cb = env->spr_cb;
9370 ctx.mem_idx = env->mmu_idx;
9371 ctx.access_type = -1;
9372 ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9373 #if defined(TARGET_PPC64)
9374 ctx.sf_mode = msr_sf;
9375 #endif
9376 ctx.fpu_enabled = msr_fp;
9377 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9378 ctx.spe_enabled = msr_spe;
9379 else
9380 ctx.spe_enabled = 0;
9381 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9382 ctx.altivec_enabled = msr_vr;
9383 else
9384 ctx.altivec_enabled = 0;
9385 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9386 ctx.singlestep_enabled = CPU_SINGLE_STEP;
9387 else
9388 ctx.singlestep_enabled = 0;
9389 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9390 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9391 if (unlikely(env->singlestep_enabled))
9392 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9393 #if defined (DO_SINGLE_STEP) && 0
9394 /* Single step trace mode */
9395 msr_se = 1;
9396 #endif
9397 num_insns = 0;
9398 max_insns = tb->cflags & CF_COUNT_MASK;
9399 if (max_insns == 0)
9400 max_insns = CF_COUNT_MASK;
9402 gen_icount_start();
9403 /* Set env in case of segfault during code fetch */
9404 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
9405 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
9406 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
9407 if (bp->pc == ctx.nip) {
9408 gen_debug_exception(ctxp);
9409 break;
9410 }
9411 }
9412 }
9413 if (unlikely(search_pc)) {
9414 j = gen_opc_ptr - gen_opc_buf;
9415 if (lj < j) {
9416 lj++;
9417 while (lj < j)
9418 gen_opc_instr_start[lj++] = 0;
9419 }
9420 gen_opc_pc[lj] = ctx.nip;
9421 gen_opc_instr_start[lj] = 1;
9422 gen_opc_icount[lj] = num_insns;
9423 }
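/* In search_pc mode the loop records, for each generated TCG op index, the
 * guest nip and instruction count, so restore_state_to_opc() at the end of
 * this file can map a faulting host PC back to a guest nip. */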
9424 LOG_DISAS("----------------\n");
9425 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9426 ctx.nip, ctx.mem_idx, (int)msr_ir);
9427 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
9428 gen_io_start();
9429 if (unlikely(ctx.le_mode)) {
9430 ctx.opcode = bswap32(ldl_code(ctx.nip));
9431 } else {
9432 ctx.opcode = ldl_code(ctx.nip);
9433 }
9434 LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9435 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9436 opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
9437 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
9438 tcg_gen_debug_insn_start(ctx.nip);
9439 ctx.nip += 4;
9440 table = env->opcodes;
9441 num_insns++;
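/* Decode: the 6-bit primary opcode indexes env->opcodes; an indirect entry
 * points to a 32-entry sub-table indexed by opc2, which may itself be
 * indirect and indexed by opc3.  These levels correspond to the
 * opc1/opc2/opc3 columns of the GEN_HANDLER table above. */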
9442 handler = table[opc1(ctx.opcode)];
9443 if (is_indirect_opcode(handler)) {
9444 table = ind_table(handler);
9445 handler = table[opc2(ctx.opcode)];
9446 if (is_indirect_opcode(handler)) {
9447 table = ind_table(handler);
9448 handler = table[opc3(ctx.opcode)];
9449 }
9450 }
9451 /* Is opcode *REALLY* valid ? */
9452 if (unlikely(handler->handler == &gen_invalid)) {
9453 if (qemu_log_enabled()) {
9454 qemu_log("invalid/unsupported opcode: "
9455 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
9456 opc1(ctx.opcode), opc2(ctx.opcode),
9457 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
9458 }
9459 } else {
9460 if (unlikely((ctx.opcode & handler->inval) != 0)) {
9461 if (qemu_log_enabled()) {
9462 qemu_log("invalid bits: %08x for opcode: "
9463 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
9464 ctx.opcode & handler->inval, opc1(ctx.opcode),
9465 opc2(ctx.opcode), opc3(ctx.opcode),
9466 ctx.opcode, ctx.nip - 4);
9467 }
9468 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
9469 break;
9470 }
9471 }
9472 (*(handler->handler))(&ctx);
9473 #if defined(DO_PPC_STATISTICS)
9474 handler->count++;
9475 #endif
9476 /* Check trace mode exceptions */
9477 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9478 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9479 ctx.exception != POWERPC_SYSCALL &&
9480 ctx.exception != POWERPC_EXCP_TRAP &&
9481 ctx.exception != POWERPC_EXCP_BRANCH)) {
9482 gen_exception(ctxp, POWERPC_EXCP_TRACE);
9483 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
9484 (env->singlestep_enabled) ||
9485 singlestep ||
9486 num_insns >= max_insns)) {
9487 /* if we reach a page boundary or are single stepping, stop
9488 * generation
9489 */
9490 break;
9491 }
9492 }
9493 if (tb->cflags & CF_LAST_IO)
9494 gen_io_end();
9495 if (ctx.exception == POWERPC_EXCP_NONE) {
9496 gen_goto_tb(&ctx, 0, ctx.nip);
9497 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
9498 if (unlikely(env->singlestep_enabled)) {
9499 gen_debug_exception(ctxp);
9500 }
9501 /* Generate the return instruction */
9502 tcg_gen_exit_tb(0);
9503 }
9504 gen_icount_end(tb, num_insns);
9505 *gen_opc_ptr = INDEX_op_end;
9506 if (unlikely(search_pc)) {
9507 j = gen_opc_ptr - gen_opc_buf;
9508 lj++;
9509 while (lj <= j)
9510 gen_opc_instr_start[lj++] = 0;
9511 } else {
9512 tb->size = ctx.nip - pc_start;
9513 tb->icount = num_insns;
9514 }
9515 #if defined(DEBUG_DISAS)
9516 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
9517 int flags;
9518 flags = env->bfd_mach;
9519 flags |= ctx.le_mode << 16;
9520 qemu_log("IN: %s\n", lookup_symbol(pc_start));
9521 log_target_disas(pc_start, ctx.nip - pc_start, flags);
9522 qemu_log("\n");
9523 }
9524 #endif
9525 }
9527 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
9528 {
9529 gen_intermediate_code_internal(env, tb, 0);
9530 }
9532 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
9533 {
9534 gen_intermediate_code_internal(env, tb, 1);
9535 }
9537 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
9538 {
9539 env->nip = gen_opc_pc[pc_pos];
9540 }