2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
22 #include "disas/disas.h"
24 #include "qemu/host-utils.h"
30 #define CPU_SINGLE_STEP 0x1
31 #define CPU_BRANCH_STEP 0x2
32 #define GDBSTUB_SINGLE_STEP 0x4
34 /* Include definitions for instruction classes and implementation flags */
35 //#define PPC_DEBUG_DISAS
36 //#define DO_PPC_STATISTICS
38 #ifdef PPC_DEBUG_DISAS
39 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
41 # define LOG_DISAS(...) do { } while (0)
43 /*****************************************************************************/
44 /* Code translation helpers */
46 /* global register indexes */
47 static TCGv_ptr cpu_env;
48 static char cpu_reg_names[10*3 + 22*4 /* GPR */
49 #if !defined(TARGET_PPC64)
50 + 10*4 + 22*5 /* SPE GPRh */
52 + 10*4 + 22*5 /* FPR */
53 + 2*(10*6 + 22*7) /* AVRh, AVRl */
55 static TCGv cpu_gpr[32];
56 #if !defined(TARGET_PPC64)
57 static TCGv cpu_gprh[32];
59 static TCGv_i64 cpu_fpr[32];
60 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
61 static TCGv_i32 cpu_crf[8];
66 #if defined(TARGET_PPC64)
69 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
70 static TCGv cpu_reserve;
71 static TCGv cpu_fpscr;
72 static TCGv_i32 cpu_access_type;
74 #include "exec/gen-icount.h"
76 void ppc_translate_init(void)
80 size_t cpu_reg_names_size;
81 static int done_init = 0;
86 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
89 cpu_reg_names_size = sizeof(cpu_reg_names);
91 for (i = 0; i < 8; i++) {
92 snprintf(p, cpu_reg_names_size, "crf%d", i);
93 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
94 offsetof(CPUPPCState, crf[i]), p);
96 cpu_reg_names_size -= 5;
99 for (i = 0; i < 32; i++) {
100 snprintf(p, cpu_reg_names_size, "r%d", i);
101 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
102 offsetof(CPUPPCState, gpr[i]), p);
103 p += (i < 10) ? 3 : 4;
104 cpu_reg_names_size -= (i < 10) ? 3 : 4;
105 #if !defined(TARGET_PPC64)
106 snprintf(p, cpu_reg_names_size, "r%dH", i);
107 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
108 offsetof(CPUPPCState, gprh[i]), p);
109 p += (i < 10) ? 4 : 5;
110 cpu_reg_names_size -= (i < 10) ? 4 : 5;
113 snprintf(p, cpu_reg_names_size, "fp%d", i);
114 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
115 offsetof(CPUPPCState, fpr[i]), p);
116 p += (i < 10) ? 4 : 5;
117 cpu_reg_names_size -= (i < 10) ? 4 : 5;
119 snprintf(p, cpu_reg_names_size, "avr%dH", i);
120 #ifdef HOST_WORDS_BIGENDIAN
121 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
122 offsetof(CPUPPCState, avr[i].u64[0]), p);
124 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
125 offsetof(CPUPPCState, avr[i].u64[1]), p);
127 p += (i < 10) ? 6 : 7;
128 cpu_reg_names_size -= (i < 10) ? 6 : 7;
130 snprintf(p, cpu_reg_names_size, "avr%dL", i);
131 #ifdef HOST_WORDS_BIGENDIAN
132 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
133 offsetof(CPUPPCState, avr[i].u64[1]), p);
135 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
136 offsetof(CPUPPCState, avr[i].u64[0]), p);
138 p += (i < 10) ? 6 : 7;
139 cpu_reg_names_size -= (i < 10) ? 6 : 7;
142 cpu_nip = tcg_global_mem_new(TCG_AREG0,
143 offsetof(CPUPPCState, nip), "nip");
145 cpu_msr = tcg_global_mem_new(TCG_AREG0,
146 offsetof(CPUPPCState, msr), "msr");
148 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
149 offsetof(CPUPPCState, ctr), "ctr");
151 cpu_lr = tcg_global_mem_new(TCG_AREG0,
152 offsetof(CPUPPCState, lr), "lr");
154 #if defined(TARGET_PPC64)
155 cpu_cfar = tcg_global_mem_new(TCG_AREG0,
156 offsetof(CPUPPCState, cfar), "cfar");
159 cpu_xer = tcg_global_mem_new(TCG_AREG0,
160 offsetof(CPUPPCState, xer), "xer");
161 cpu_so = tcg_global_mem_new(TCG_AREG0,
162 offsetof(CPUPPCState, so), "SO");
163 cpu_ov = tcg_global_mem_new(TCG_AREG0,
164 offsetof(CPUPPCState, ov), "OV");
165 cpu_ca = tcg_global_mem_new(TCG_AREG0,
166 offsetof(CPUPPCState, ca), "CA");
168 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
169 offsetof(CPUPPCState, reserve_addr),
172 cpu_fpscr = tcg_global_mem_new(TCG_AREG0,
173 offsetof(CPUPPCState, fpscr), "fpscr");
175 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
176 offsetof(CPUPPCState, access_type), "access_type");
178 /* register helpers */
185 /* internal defines */
186 typedef struct DisasContext {
187 struct TranslationBlock *tb;
191 /* Routine used to access memory */
194 /* Translation flags */
196 #if defined(TARGET_PPC64)
203 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
204 int singlestep_enabled;
205 uint64_t insns_flags;
206 uint64_t insns_flags2;
209 /* True when active word size < size of target_long. */
211 # define NARROW_MODE(C) (!(C)->sf_mode)
213 # define NARROW_MODE(C) 0
216 struct opc_handler_t {
217 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
219 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
221 /* instruction type */
223 /* extended instruction type */
226 void (*handler)(DisasContext *ctx);
227 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
230 #if defined(DO_PPC_STATISTICS)
235 static inline void gen_reset_fpstatus(void)
237 gen_helper_reset_fpstatus(cpu_env);
240 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
242 TCGv_i32 t0 = tcg_temp_new_i32();
245 /* This case might be optimized later */
246 tcg_gen_movi_i32(t0, 1);
247 gen_helper_compute_fprf(t0, cpu_env, arg, t0);
248 if (unlikely(set_rc)) {
249 tcg_gen_mov_i32(cpu_crf[1], t0);
251 gen_helper_float_check_status(cpu_env);
252 } else if (unlikely(set_rc)) {
253 /* We always need to compute fpcc */
254 tcg_gen_movi_i32(t0, 0);
255 gen_helper_compute_fprf(t0, cpu_env, arg, t0);
256 tcg_gen_mov_i32(cpu_crf[1], t0);
259 tcg_temp_free_i32(t0);
262 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
264 if (ctx->access_type != access_type) {
265 tcg_gen_movi_i32(cpu_access_type, access_type);
266 ctx->access_type = access_type;
270 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
272 if (NARROW_MODE(ctx)) {
275 tcg_gen_movi_tl(cpu_nip, nip);
278 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
281 if (ctx->exception == POWERPC_EXCP_NONE) {
282 gen_update_nip(ctx, ctx->nip);
284 t0 = tcg_const_i32(excp);
285 t1 = tcg_const_i32(error);
286 gen_helper_raise_exception_err(cpu_env, t0, t1);
287 tcg_temp_free_i32(t0);
288 tcg_temp_free_i32(t1);
289 ctx->exception = (excp);
292 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
295 if (ctx->exception == POWERPC_EXCP_NONE) {
296 gen_update_nip(ctx, ctx->nip);
298 t0 = tcg_const_i32(excp);
299 gen_helper_raise_exception(cpu_env, t0);
300 tcg_temp_free_i32(t0);
301 ctx->exception = (excp);
304 static inline void gen_debug_exception(DisasContext *ctx)
308 if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
309 (ctx->exception != POWERPC_EXCP_SYNC)) {
310 gen_update_nip(ctx, ctx->nip);
312 t0 = tcg_const_i32(EXCP_DEBUG);
313 gen_helper_raise_exception(cpu_env, t0);
314 tcg_temp_free_i32(t0);
317 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
319 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
322 /* Stop translation */
323 static inline void gen_stop_exception(DisasContext *ctx)
325 gen_update_nip(ctx, ctx->nip);
326 ctx->exception = POWERPC_EXCP_STOP;
329 /* No need to update nip here, as execution flow will change */
330 static inline void gen_sync_exception(DisasContext *ctx)
332 ctx->exception = POWERPC_EXCP_SYNC;
335 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
336 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
338 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
339 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
341 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
342 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
344 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
345 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
347 typedef struct opcode_t {
348 unsigned char opc1, opc2, opc3;
349 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
350 unsigned char pad[5];
352 unsigned char pad[1];
354 opc_handler_t handler;
358 /*****************************************************************************/
359 /*** Instruction decoding ***/
360 #define EXTRACT_HELPER(name, shift, nb) \
361 static inline uint32_t name(uint32_t opcode) \
363 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
366 #define EXTRACT_SHELPER(name, shift, nb) \
367 static inline int32_t name(uint32_t opcode) \
369 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
373 EXTRACT_HELPER(opc1, 26, 6);
375 EXTRACT_HELPER(opc2, 1, 5);
377 EXTRACT_HELPER(opc3, 6, 5);
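/* For example, opc1() returns the 6-bit primary opcode (the most-significant
 * bits of the instruction word), while opc2() and opc3() pick out the two
 * 5-bit fields used to index the secondary and tertiary opcode tables when
 * decoding; EXTRACT_SHELPER additionally sign-extends, as used for the
 * 16-bit SIMM field below.
 */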
378 /* Update CR0 flags */
379 EXTRACT_HELPER(Rc, 0, 1);
381 EXTRACT_HELPER(rD, 21, 5);
383 EXTRACT_HELPER(rS, 21, 5);
385 EXTRACT_HELPER(rA, 16, 5);
387 EXTRACT_HELPER(rB, 11, 5);
389 EXTRACT_HELPER(rC, 6, 5);
391 EXTRACT_HELPER(crfD, 23, 3);
392 EXTRACT_HELPER(crfS, 18, 3);
393 EXTRACT_HELPER(crbD, 21, 5);
394 EXTRACT_HELPER(crbA, 16, 5);
395 EXTRACT_HELPER(crbB, 11, 5);
397 EXTRACT_HELPER(_SPR, 11, 10);
398 static inline uint32_t SPR(uint32_t opcode)
400 uint32_t sprn = _SPR(opcode);
402 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
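/* The two 5-bit halves of the SPR field are encoded swapped in the
 * instruction, so SPR() swaps them back to the architected SPR number.
 * E.g. mflr encodes LR (SPR 8) as the raw field value 0x100, which SPR()
 * turns back into 8.
 */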
404 /*** Get constants ***/
405 EXTRACT_HELPER(IMM, 12, 8);
406 /* 16 bits signed immediate value */
407 EXTRACT_SHELPER(SIMM, 0, 16);
408 /* 16 bits unsigned immediate value */
409 EXTRACT_HELPER(UIMM, 0, 16);
410 /* 5 bits signed immediate value */
411 EXTRACT_HELPER(SIMM5, 16, 5);
412 /* 5 bits unsigned immediate value */
413 EXTRACT_HELPER(UIMM5, 16, 5);
415 EXTRACT_HELPER(NB, 11, 5);
417 EXTRACT_HELPER(SH, 11, 5);
418 /* Vector shift count */
419 EXTRACT_HELPER(VSH, 6, 4);
421 EXTRACT_HELPER(MB, 6, 5);
423 EXTRACT_HELPER(ME, 1, 5);
425 EXTRACT_HELPER(TO, 21, 5);
427 EXTRACT_HELPER(CRM, 12, 8);
428 EXTRACT_HELPER(SR, 16, 4);
431 EXTRACT_HELPER(FPBF, 19, 3);
432 EXTRACT_HELPER(FPIMM, 12, 4);
433 EXTRACT_HELPER(FPL, 21, 1);
434 EXTRACT_HELPER(FPFLM, 17, 8);
435 EXTRACT_HELPER(FPW, 16, 1);
437 /*** Jump target decoding ***/
439 EXTRACT_SHELPER(d, 0, 16);
440 /* Immediate address */
441 static inline target_ulong LI(uint32_t opcode)
443 return (opcode >> 0) & 0x03FFFFFC;
446 static inline uint32_t BD(uint32_t opcode)
448 return (opcode >> 0) & 0xFFFC;
451 EXTRACT_HELPER(BO, 21, 5);
452 EXTRACT_HELPER(BI, 16, 5);
453 /* Absolute/relative address */
454 EXTRACT_HELPER(AA, 1, 1);
456 EXTRACT_HELPER(LK, 0, 1);
458 /* Create a mask between <start> and <end> bits */
459 static inline target_ulong MASK(uint32_t start, uint32_t end)
463 #if defined(TARGET_PPC64)
464 if (likely(start == 0)) {
465 ret = UINT64_MAX << (63 - end);
466 } else if (likely(end == 63)) {
467 ret = UINT64_MAX >> start;
470 if (likely(start == 0)) {
471 ret = UINT32_MAX << (31 - end);
472 } else if (likely(end == 31)) {
473 ret = UINT32_MAX >> start;
477 ret = (((target_ulong)(-1ULL)) >> (start)) ^
478 (((target_ulong)(-1ULL) >> (end)) >> 1);
479 if (unlikely(start > end))
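/* Examples: on a 32-bit target MASK(0, 31) is 0xFFFFFFFF and MASK(24, 31)
 * is 0x000000FF (big-endian bit numbering, bit 0 being the MSB).  When
 * start > end the in-range mask is complemented, which is how rlwinm-style
 * wrap-around masks such as MASK(30, 1) are expressed.
 */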
486 /*****************************************************************************/
487 /* PowerPC instructions table */
489 #if defined(DO_PPC_STATISTICS)
490 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
500 .handler = &gen_##name, \
501 .oname = stringify(name), \
503 .oname = stringify(name), \
505 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
516 .handler = &gen_##name, \
517 .oname = stringify(name), \
519 .oname = stringify(name), \
521 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
531 .handler = &gen_##name, \
537 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
547 .handler = &gen_##name, \
549 .oname = stringify(name), \
551 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
562 .handler = &gen_##name, \
564 .oname = stringify(name), \
566 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
576 .handler = &gen_##name, \
582 /* SPR load/store helpers */
583 static inline void gen_load_spr(TCGv t, int reg)
585 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
588 static inline void gen_store_spr(int reg, TCGv t)
590 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
593 /* Invalid instruction */
594 static void gen_invalid(DisasContext *ctx)
596 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
599 static opc_handler_t invalid_handler = {
600 .inval1 = 0xFFFFFFFF,
601 .inval2 = 0xFFFFFFFF,
604 .handler = gen_invalid,
607 /*** Integer comparison ***/
609 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
611 TCGv t0 = tcg_temp_new();
612 TCGv_i32 t1 = tcg_temp_new_i32();
614 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
616 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
617 tcg_gen_trunc_tl_i32(t1, t0);
618 tcg_gen_shli_i32(t1, t1, CRF_LT);
619 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
621 tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
622 tcg_gen_trunc_tl_i32(t1, t0);
623 tcg_gen_shli_i32(t1, t1, CRF_GT);
624 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
626 tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
627 tcg_gen_trunc_tl_i32(t1, t0);
628 tcg_gen_shli_i32(t1, t1, CRF_EQ);
629 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
632 tcg_temp_free_i32(t1);
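/* gen_op_cmp() builds the CR field directly: SO is copied in first, then
 * the LT, GT and EQ bits are OR'ed in from the three setcond results.
 * E.g. comparing 3 with 5 signed leaves only the LT bit set (plus SO if
 * the summary-overflow bit was already set).
 */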
635 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
637 TCGv t0 = tcg_const_tl(arg1);
638 gen_op_cmp(arg0, t0, s, crf);
642 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
648 tcg_gen_ext32s_tl(t0, arg0);
649 tcg_gen_ext32s_tl(t1, arg1);
651 tcg_gen_ext32u_tl(t0, arg0);
652 tcg_gen_ext32u_tl(t1, arg1);
654 gen_op_cmp(t0, t1, s, crf);
659 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
661 TCGv t0 = tcg_const_tl(arg1);
662 gen_op_cmp32(arg0, t0, s, crf);
666 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
668 if (NARROW_MODE(ctx)) {
669 gen_op_cmpi32(reg, 0, 1, 0);
671 gen_op_cmpi(reg, 0, 1, 0);
676 static void gen_cmp(DisasContext *ctx)
678 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
679 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
680 1, crfD(ctx->opcode));
682 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
683 1, crfD(ctx->opcode));
688 static void gen_cmpi(DisasContext *ctx)
690 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
691 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
692 1, crfD(ctx->opcode));
694 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
695 1, crfD(ctx->opcode));
700 static void gen_cmpl(DisasContext *ctx)
702 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
703 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
704 0, crfD(ctx->opcode));
706 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
707 0, crfD(ctx->opcode));
712 static void gen_cmpli(DisasContext *ctx)
714 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
715 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
716 0, crfD(ctx->opcode));
718 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
719 0, crfD(ctx->opcode));
723 /* isel (PowerPC 2.03 specification) */
724 static void gen_isel(DisasContext *ctx)
727 uint32_t bi = rC(ctx->opcode);
731 l1 = gen_new_label();
732 l2 = gen_new_label();
734 mask = 1 << (3 - (bi & 0x03));
735 t0 = tcg_temp_new_i32();
736 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
737 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
738 if (rA(ctx->opcode) == 0)
739 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
741 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
744 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
746 tcg_temp_free_i32(t0);
749 /* cmpb: PowerPC 2.05 specification */
750 static void gen_cmpb(DisasContext *ctx)
752 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
753 cpu_gpr[rB(ctx->opcode)]);
756 /*** Integer arithmetic ***/
758 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
759 TCGv arg1, TCGv arg2, int sub)
761 TCGv t0 = tcg_temp_new();
763 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
764 tcg_gen_xor_tl(t0, arg1, arg2);
766 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
768 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
771 if (NARROW_MODE(ctx)) {
772 tcg_gen_ext32s_tl(cpu_ov, cpu_ov);
774 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1);
775 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
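/* For addition this is the textbook rule: overflow iff both operands have
 * the same sign and the result's sign differs from them, i.e.
 * OV = msb((res ^ arg2) & ~(arg1 ^ arg2)); for subtraction the operands
 * must differ in sign instead, hence the and/andc selection above.  The
 * new OV value is also accumulated into SO.
 */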
778 /* Common add function */
779 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
780 TCGv arg2, bool add_ca, bool compute_ca,
781 bool compute_ov, bool compute_rc0)
785 if (compute_ca || compute_ov) {
790 if (NARROW_MODE(ctx)) {
791 /* Caution: a non-obvious corner case of the spec is that we
792 must produce the *entire* 64-bit addition, but produce the
793 carry into bit 32. */
794 TCGv t1 = tcg_temp_new();
795 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
796 tcg_gen_add_tl(t0, arg1, arg2);
798 tcg_gen_add_tl(t0, t0, cpu_ca);
800 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
802 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
803 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
805 TCGv zero = tcg_const_tl(0);
807 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
808 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
810 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
815 tcg_gen_add_tl(t0, arg1, arg2);
817 tcg_gen_add_tl(t0, t0, cpu_ca);
822 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
824 if (unlikely(compute_rc0)) {
825 gen_set_Rc0(ctx, t0);
828 if (!TCGV_EQUAL(t0, ret)) {
829 tcg_gen_mov_tl(ret, t0);
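/* Worked example of the narrow-mode carry trick above: with arg1 =
 * 0xFFFFFFFF and arg2 = 1, t1 = arg1 ^ arg2 = 0xFFFFFFFE and t0 = arg1 +
 * arg2 = 0x1_00000000; t0 ^ t1 has bit 32 set exactly because a carry
 * propagated out of bit 31, so ((t0 ^ t1) >> 32) & 1 recovers CA.
 */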
833 /* Add functions with two operands */
834 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
835 static void glue(gen_, name)(DisasContext *ctx) \
837 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
838 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
839 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
841 /* Add functions with one operand and one immediate */
842 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
843 add_ca, compute_ca, compute_ov) \
844 static void glue(gen_, name)(DisasContext *ctx) \
846 TCGv t0 = tcg_const_tl(const_val); \
847 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
848 cpu_gpr[rA(ctx->opcode)], t0, \
849 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
853 /* add add. addo addo. */
854 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
855 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
856 /* addc addc. addco addco. */
857 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
858 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
859 /* adde adde. addeo addeo. */
860 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
861 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
862 /* addme addme. addmeo addmeo. */
863 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
864 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
865 /* addze addze. addzeo addzeo. */
866 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
867 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
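/* Each GEN_INT_ARITH_ADD* invocation above expands to a gen_<name>()
 * handler that forwards rA and rB (or the given constant) to
 * gen_op_arith_add with the matching flags; e.g. gen_adde() computes
 * rA + rB + CA and records CA, while gen_addeo() additionally records OV.
 */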
869 static void gen_addi(DisasContext *ctx)
871 target_long simm = SIMM(ctx->opcode);
873 if (rA(ctx->opcode) == 0) {
875 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
877 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
878 cpu_gpr[rA(ctx->opcode)], simm);
882 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
884 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
885 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
886 c, 0, 1, 0, compute_rc0);
890 static void gen_addic(DisasContext *ctx)
892 gen_op_addic(ctx, 0);
895 static void gen_addic_(DisasContext *ctx)
897 gen_op_addic(ctx, 1);
901 static void gen_addis(DisasContext *ctx)
903 target_long simm = SIMM(ctx->opcode);
905 if (rA(ctx->opcode) == 0) {
907 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
909 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
910 cpu_gpr[rA(ctx->opcode)], simm << 16);
914 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
915 TCGv arg2, int sign, int compute_ov)
917 int l1 = gen_new_label();
918 int l2 = gen_new_label();
919 TCGv_i32 t0 = tcg_temp_local_new_i32();
920 TCGv_i32 t1 = tcg_temp_local_new_i32();
922 tcg_gen_trunc_tl_i32(t0, arg1);
923 tcg_gen_trunc_tl_i32(t1, arg2);
924 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
926 int l3 = gen_new_label();
927 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
928 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
930 tcg_gen_div_i32(t0, t0, t1);
932 tcg_gen_divu_i32(t0, t0, t1);
935 tcg_gen_movi_tl(cpu_ov, 0);
940 tcg_gen_sari_i32(t0, t0, 31);
942 tcg_gen_movi_i32(t0, 0);
945 tcg_gen_movi_tl(cpu_ov, 1);
946 tcg_gen_movi_tl(cpu_so, 1);
949 tcg_gen_extu_i32_tl(ret, t0);
950 tcg_temp_free_i32(t0);
951 tcg_temp_free_i32(t1);
952 if (unlikely(Rc(ctx->opcode) != 0))
953 gen_set_Rc0(ctx, ret);
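/* On the overflow path (divide by zero, or INT32_MIN / -1 in the signed
 * case) the architecture leaves the quotient undefined; the code above
 * returns all-ones for a negative signed dividend and zero otherwise, and
 * sets OV and SO when overflow reporting is requested.
 */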
956 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
957 static void glue(gen_, name)(DisasContext *ctx) \
959 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
960 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
963 /* divwu divwu. divwuo divwuo. */
964 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
965 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
966 /* divw divw. divwo divwo. */
967 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
968 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
969 #if defined(TARGET_PPC64)
970 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
971 TCGv arg2, int sign, int compute_ov)
973 int l1 = gen_new_label();
974 int l2 = gen_new_label();
976 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
978 int l3 = gen_new_label();
979 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
980 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
982 tcg_gen_div_i64(ret, arg1, arg2);
984 tcg_gen_divu_i64(ret, arg1, arg2);
987 tcg_gen_movi_tl(cpu_ov, 0);
992 tcg_gen_sari_i64(ret, arg1, 63);
994 tcg_gen_movi_i64(ret, 0);
997 tcg_gen_movi_tl(cpu_ov, 1);
998 tcg_gen_movi_tl(cpu_so, 1);
1001 if (unlikely(Rc(ctx->opcode) != 0))
1002 gen_set_Rc0(ctx, ret);
1004 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1005 static void glue(gen_, name)(DisasContext *ctx) \
1007 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1008 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1009 sign, compute_ov); \
1011 /* divdu divdu. divduo divduo. */
1012 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1013 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1014 /* divd divd. divdo divdo. */
1015 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1016 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1020 static void gen_mulhw(DisasContext *ctx)
1022 TCGv_i32 t0 = tcg_temp_new_i32();
1023 TCGv_i32 t1 = tcg_temp_new_i32();
1025 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1026 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1027 tcg_gen_muls2_i32(t0, t1, t0, t1);
1028 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1029 tcg_temp_free_i32(t0);
1030 tcg_temp_free_i32(t1);
1031 if (unlikely(Rc(ctx->opcode) != 0))
1032 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1035 /* mulhwu mulhwu. */
1036 static void gen_mulhwu(DisasContext *ctx)
1038 TCGv_i32 t0 = tcg_temp_new_i32();
1039 TCGv_i32 t1 = tcg_temp_new_i32();
1041 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1042 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1043 tcg_gen_mulu2_i32(t0, t1, t0, t1);
1044 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1045 tcg_temp_free_i32(t0);
1046 tcg_temp_free_i32(t1);
1047 if (unlikely(Rc(ctx->opcode) != 0))
1048 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1052 static void gen_mullw(DisasContext *ctx)
1054 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1055 cpu_gpr[rB(ctx->opcode)]);
1056 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1057 if (unlikely(Rc(ctx->opcode) != 0))
1058 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1061 /* mullwo mullwo. */
1062 static void gen_mullwo(DisasContext *ctx)
1064 TCGv_i32 t0 = tcg_temp_new_i32();
1065 TCGv_i32 t1 = tcg_temp_new_i32();
1067 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1068 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1069 tcg_gen_muls2_i32(t0, t1, t0, t1);
1070 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
1072 tcg_gen_sari_i32(t0, t0, 31);
1073 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1074 tcg_gen_extu_i32_tl(cpu_ov, t0);
1075 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1077 tcg_temp_free_i32(t0);
1078 tcg_temp_free_i32(t1);
1079 if (unlikely(Rc(ctx->opcode) != 0))
1080 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
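/* Overflow detection for mullwo: muls2 produces the full 64-bit signed
 * product in t0 (low word) and t1 (high word); the product fits in 32 bits
 * exactly when the high word equals the sign-extension of the low word,
 * hence OV = (t1 != sar(t0, 31)).
 */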
1084 static void gen_mulli(DisasContext *ctx)
1086 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1090 #if defined(TARGET_PPC64)
1092 static void gen_mulhd(DisasContext *ctx)
1094 TCGv lo = tcg_temp_new();
1095 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1096 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1098 if (unlikely(Rc(ctx->opcode) != 0)) {
1099 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1103 /* mulhdu mulhdu. */
1104 static void gen_mulhdu(DisasContext *ctx)
1106 TCGv lo = tcg_temp_new();
1107 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1108 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1110 if (unlikely(Rc(ctx->opcode) != 0)) {
1111 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1116 static void gen_mulld(DisasContext *ctx)
1118 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1119 cpu_gpr[rB(ctx->opcode)]);
1120 if (unlikely(Rc(ctx->opcode) != 0))
1121 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1124 /* mulldo mulldo. */
1125 static void gen_mulldo(DisasContext *ctx)
1127 gen_helper_mulldo(cpu_gpr[rD(ctx->opcode)], cpu_env,
1128 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1129 if (unlikely(Rc(ctx->opcode) != 0)) {
1130 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1135 /* Common subf function */
1136 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1137 TCGv arg2, bool add_ca, bool compute_ca,
1138 bool compute_ov, bool compute_rc0)
1142 if (compute_ca || compute_ov) {
1143 t0 = tcg_temp_new();
1147 /* dest = ~arg1 + arg2 [+ ca]. */
1148 if (NARROW_MODE(ctx)) {
1149 /* Caution: a non-obvious corner case of the spec is that we
1150 must produce the *entire* 64-bit addition, but produce the
1151 carry into bit 32. */
1152 TCGv inv1 = tcg_temp_new();
1153 TCGv t1 = tcg_temp_new();
1154 tcg_gen_not_tl(inv1, arg1);
1156 tcg_gen_add_tl(t0, arg2, cpu_ca);
1158 tcg_gen_addi_tl(t0, arg2, 1);
1160 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
1161 tcg_gen_add_tl(t0, t0, inv1);
1162 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
1164 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
1165 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
1166 } else if (add_ca) {
1167 TCGv zero, inv1 = tcg_temp_new();
1168 tcg_gen_not_tl(inv1, arg1);
1169 zero = tcg_const_tl(0);
1170 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1171 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1172 tcg_temp_free(zero);
1173 tcg_temp_free(inv1);
1175 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1176 tcg_gen_sub_tl(t0, arg2, arg1);
1178 } else if (add_ca) {
1179 /* Since we're ignoring carry-out, we can simplify the
1180 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1181 tcg_gen_sub_tl(t0, arg2, arg1);
1182 tcg_gen_add_tl(t0, t0, cpu_ca);
1183 tcg_gen_subi_tl(t0, t0, 1);
1185 tcg_gen_sub_tl(t0, arg2, arg1);
1189 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1191 if (unlikely(compute_rc0)) {
1192 gen_set_Rc0(ctx, t0);
1195 if (!TCGV_EQUAL(t0, ret)) {
1196 tcg_gen_mov_tl(ret, t0);
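/* Note that the PowerPC carry for subtract-from is the inverse of a
 * borrow: CA is set when arg2 >= arg1 unsigned (the TCG_COND_GEU setcond
 * in the simple path), which matches the ~arg1 + arg2 + 1 formulation used
 * by the other paths.
 */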
1200 /* Sub functions with two operands */
1201 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1202 static void glue(gen_, name)(DisasContext *ctx) \
1204 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1205 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1206 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1208 /* Sub functions with one operand and one immediate */
1209 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1210 add_ca, compute_ca, compute_ov) \
1211 static void glue(gen_, name)(DisasContext *ctx) \
1213 TCGv t0 = tcg_const_tl(const_val); \
1214 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1215 cpu_gpr[rA(ctx->opcode)], t0, \
1216 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1217 tcg_temp_free(t0); \
1219 /* subf subf. subfo subfo. */
1220 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1221 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1222 /* subfc subfc. subfco subfco. */
1223 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1224 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1225 /* subfe subfe. subfeo subfeo. */
1226 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1227 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1228 /* subfme subfme. subfmeo subfmeo. */
1229 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1230 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1231 /* subfze subfze. subfzeo subfzeo. */
1232 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1233 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1236 static void gen_subfic(DisasContext *ctx)
1238 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1239 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1244 /* neg neg. nego nego. */
1245 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
1247 TCGv zero = tcg_const_tl(0);
1248 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1249 zero, 0, 0, compute_ov, Rc(ctx->opcode));
1250 tcg_temp_free(zero);
1253 static void gen_neg(DisasContext *ctx)
1255 gen_op_arith_neg(ctx, 0);
1258 static void gen_nego(DisasContext *ctx)
1260 gen_op_arith_neg(ctx, 1);
1263 /*** Integer logical ***/
1264 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1265 static void glue(gen_, name)(DisasContext *ctx) \
1267 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1268 cpu_gpr[rB(ctx->opcode)]); \
1269 if (unlikely(Rc(ctx->opcode) != 0)) \
1270 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1273 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1274 static void glue(gen_, name)(DisasContext *ctx) \
1276 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1277 if (unlikely(Rc(ctx->opcode) != 0)) \
1278 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1282 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1284 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1287 static void gen_andi_(DisasContext *ctx)
1289 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1290 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1294 static void gen_andis_(DisasContext *ctx)
1296 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1297 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1301 static void gen_cntlzw(DisasContext *ctx)
1303 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1304 if (unlikely(Rc(ctx->opcode) != 0))
1305 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1308 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1309 /* extsb & extsb. */
1310 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1311 /* extsh & extsh. */
1312 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1314 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1316 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1319 static void gen_or(DisasContext *ctx)
1323 rs = rS(ctx->opcode);
1324 ra = rA(ctx->opcode);
1325 rb = rB(ctx->opcode);
1326 /* Optimisation for mr. ri case */
1327 if (rs != ra || rs != rb) {
1329 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1331 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1332 if (unlikely(Rc(ctx->opcode) != 0))
1333 gen_set_Rc0(ctx, cpu_gpr[ra]);
1334 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1335 gen_set_Rc0(ctx, cpu_gpr[rs]);
1336 #if defined(TARGET_PPC64)
1342 /* Set process priority to low */
1346 /* Set process priority to medium-low */
1350 /* Set process priority to normal */
1353 #if !defined(CONFIG_USER_ONLY)
1355 if (ctx->mem_idx > 0) {
1356 /* Set process priority to very low */
1361 if (ctx->mem_idx > 0) {
1362 /* Set process priority to medium-high */
1367 if (ctx->mem_idx > 0) {
1368 /* Set process priority to high */
1373 if (ctx->mem_idx > 1) {
1374 /* Set process priority to very high */
1384 TCGv t0 = tcg_temp_new();
1385 gen_load_spr(t0, SPR_PPR);
1386 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1387 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1388 gen_store_spr(SPR_PPR, t0);
1395 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1398 static void gen_xor(DisasContext *ctx)
1400 /* Optimisation for "set to zero" case */
1401 if (rS(ctx->opcode) != rB(ctx->opcode))
1402 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1404 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1405 if (unlikely(Rc(ctx->opcode) != 0))
1406 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1410 static void gen_ori(DisasContext *ctx)
1412 target_ulong uimm = UIMM(ctx->opcode);
1414 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1416 /* XXX: should handle special NOPs for POWER series */
1419 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1423 static void gen_oris(DisasContext *ctx)
1425 target_ulong uimm = UIMM(ctx->opcode);
1427 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1431 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1435 static void gen_xori(DisasContext *ctx)
1437 target_ulong uimm = UIMM(ctx->opcode);
1439 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1443 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1447 static void gen_xoris(DisasContext *ctx)
1449 target_ulong uimm = UIMM(ctx->opcode);
1451 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1455 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1458 /* popcntb : PowerPC 2.03 specification */
1459 static void gen_popcntb(DisasContext *ctx)
1461 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1464 static void gen_popcntw(DisasContext *ctx)
1466 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1469 #if defined(TARGET_PPC64)
1470 /* popcntd: PowerPC 2.06 specification */
1471 static void gen_popcntd(DisasContext *ctx)
1473 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1477 /* prtyw: PowerPC 2.05 specification */
1478 static void gen_prtyw(DisasContext *ctx)
1480 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1481 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1482 TCGv t0 = tcg_temp_new();
1483 tcg_gen_shri_tl(t0, rs, 16);
1484 tcg_gen_xor_tl(ra, rs, t0);
1485 tcg_gen_shri_tl(t0, ra, 8);
1486 tcg_gen_xor_tl(ra, ra, t0);
1487 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
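/* prtyw computes, for each 32-bit word, the parity of the low-order bit of
 * each of its four bytes: the >>16 and >>8 folds xor those byte LSBs
 * together into bit 0 of each word, and the final mask keeps only bit 0
 * (and bit 32 on a 64-bit target).
 */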
1491 #if defined(TARGET_PPC64)
1492 /* prtyd: PowerPC 2.05 specification */
1493 static void gen_prtyd(DisasContext *ctx)
1495 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1496 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1497 TCGv t0 = tcg_temp_new();
1498 tcg_gen_shri_tl(t0, rs, 32);
1499 tcg_gen_xor_tl(ra, rs, t0);
1500 tcg_gen_shri_tl(t0, ra, 16);
1501 tcg_gen_xor_tl(ra, ra, t0);
1502 tcg_gen_shri_tl(t0, ra, 8);
1503 tcg_gen_xor_tl(ra, ra, t0);
1504 tcg_gen_andi_tl(ra, ra, 1);
1509 #if defined(TARGET_PPC64)
1510 /* extsw & extsw. */
1511 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1514 static void gen_cntlzd(DisasContext *ctx)
1516 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1517 if (unlikely(Rc(ctx->opcode) != 0))
1518 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1522 /*** Integer rotate ***/
1524 /* rlwimi & rlwimi. */
1525 static void gen_rlwimi(DisasContext *ctx)
1527 uint32_t mb, me, sh;
1529 mb = MB(ctx->opcode);
1530 me = ME(ctx->opcode);
1531 sh = SH(ctx->opcode);
1532 if (likely(sh == 0 && mb == 0 && me == 31)) {
1533 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1537 TCGv t0 = tcg_temp_new();
1538 #if defined(TARGET_PPC64)
1539 TCGv_i32 t2 = tcg_temp_new_i32();
1540 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1541 tcg_gen_rotli_i32(t2, t2, sh);
1542 tcg_gen_extu_i32_i64(t0, t2);
1543 tcg_temp_free_i32(t2);
1545 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1547 #if defined(TARGET_PPC64)
1551 mask = MASK(mb, me);
1552 t1 = tcg_temp_new();
1553 tcg_gen_andi_tl(t0, t0, mask);
1554 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1555 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1559 if (unlikely(Rc(ctx->opcode) != 0))
1560 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
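/* Example: rlwimi rA,rS,8,16,23 rotates rS left by 8 and inserts the
 * rotated value under MASK(16, 23) (0x0000FF00) into rA, leaving the other
 * bits of rA untouched.
 */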
1563 /* rlwinm & rlwinm. */
1564 static void gen_rlwinm(DisasContext *ctx)
1566 uint32_t mb, me, sh;
1568 sh = SH(ctx->opcode);
1569 mb = MB(ctx->opcode);
1570 me = ME(ctx->opcode);
1572 if (likely(mb == 0 && me == (31 - sh))) {
1573 if (likely(sh == 0)) {
1574 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1576 TCGv t0 = tcg_temp_new();
1577 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1578 tcg_gen_shli_tl(t0, t0, sh);
1579 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1582 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1583 TCGv t0 = tcg_temp_new();
1584 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1585 tcg_gen_shri_tl(t0, t0, mb);
1586 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1589 TCGv t0 = tcg_temp_new();
1590 #if defined(TARGET_PPC64)
1591 TCGv_i32 t1 = tcg_temp_new_i32();
1592 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1593 tcg_gen_rotli_i32(t1, t1, sh);
1594 tcg_gen_extu_i32_i64(t0, t1);
1595 tcg_temp_free_i32(t1);
1597 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1599 #if defined(TARGET_PPC64)
1603 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1606 if (unlikely(Rc(ctx->opcode) != 0))
1607 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
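/* The special cases above correspond to the usual simplified mnemonics:
 * mb == 0 && me == 31 - sh is slwi (shift left immediate) and
 * sh == 32 - mb && me == 31 is srwi (shift right immediate), both of which
 * avoid the generic rotate-and-mask sequence.
 */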
1610 /* rlwnm & rlwnm. */
1611 static void gen_rlwnm(DisasContext *ctx)
1615 #if defined(TARGET_PPC64)
1619 mb = MB(ctx->opcode);
1620 me = ME(ctx->opcode);
1621 t0 = tcg_temp_new();
1622 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1623 #if defined(TARGET_PPC64)
1624 t1 = tcg_temp_new_i32();
1625 t2 = tcg_temp_new_i32();
1626 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1627 tcg_gen_trunc_i64_i32(t2, t0);
1628 tcg_gen_rotl_i32(t1, t1, t2);
1629 tcg_gen_extu_i32_i64(t0, t1);
1630 tcg_temp_free_i32(t1);
1631 tcg_temp_free_i32(t2);
1633 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1635 if (unlikely(mb != 0 || me != 31)) {
1636 #if defined(TARGET_PPC64)
1640 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1642 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1645 if (unlikely(Rc(ctx->opcode) != 0))
1646 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1649 #if defined(TARGET_PPC64)
1650 #define GEN_PPC64_R2(name, opc1, opc2) \
1651 static void glue(gen_, name##0)(DisasContext *ctx) \
1653 gen_##name(ctx, 0); \
1656 static void glue(gen_, name##1)(DisasContext *ctx) \
1658 gen_##name(ctx, 1); \
1660 #define GEN_PPC64_R4(name, opc1, opc2) \
1661 static void glue(gen_, name##0)(DisasContext *ctx) \
1663 gen_##name(ctx, 0, 0); \
1666 static void glue(gen_, name##1)(DisasContext *ctx) \
1668 gen_##name(ctx, 0, 1); \
1671 static void glue(gen_, name##2)(DisasContext *ctx) \
1673 gen_##name(ctx, 1, 0); \
1676 static void glue(gen_, name##3)(DisasContext *ctx) \
1678 gen_##name(ctx, 1, 1); \
1681 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1684 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1685 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1686 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1687 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1689 TCGv t0 = tcg_temp_new();
1690 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1691 if (likely(mb == 0 && me == 63)) {
1692 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1694 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1698 if (unlikely(Rc(ctx->opcode) != 0))
1699 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1701 /* rldicl - rldicl. */
1702 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1706 sh = SH(ctx->opcode) | (shn << 5);
1707 mb = MB(ctx->opcode) | (mbn << 5);
1708 gen_rldinm(ctx, mb, 63, sh);
1710 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1711 /* rldicr - rldicr. */
1712 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1716 sh = SH(ctx->opcode) | (shn << 5);
1717 me = MB(ctx->opcode) | (men << 5);
1718 gen_rldinm(ctx, 0, me, sh);
1720 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1721 /* rldic - rldic. */
1722 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1726 sh = SH(ctx->opcode) | (shn << 5);
1727 mb = MB(ctx->opcode) | (mbn << 5);
1728 gen_rldinm(ctx, mb, 63 - sh, sh);
1730 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1732 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1736 mb = MB(ctx->opcode);
1737 me = ME(ctx->opcode);
1738 t0 = tcg_temp_new();
1739 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1740 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1741 if (unlikely(mb != 0 || me != 63)) {
1742 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1744 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1747 if (unlikely(Rc(ctx->opcode) != 0))
1748 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1751 /* rldcl - rldcl. */
1752 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1756 mb = MB(ctx->opcode) | (mbn << 5);
1757 gen_rldnm(ctx, mb, 63);
1759 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1760 /* rldcr - rldcr. */
1761 static inline void gen_rldcr(DisasContext *ctx, int men)
1765 me = MB(ctx->opcode) | (men << 5);
1766 gen_rldnm(ctx, 0, me);
1768 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1769 /* rldimi - rldimi. */
1770 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1772 uint32_t sh, mb, me;
1774 sh = SH(ctx->opcode) | (shn << 5);
1775 mb = MB(ctx->opcode) | (mbn << 5);
1777 if (unlikely(sh == 0 && mb == 0)) {
1778 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1783 t0 = tcg_temp_new();
1784 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1785 t1 = tcg_temp_new();
1786 mask = MASK(mb, me);
1787 tcg_gen_andi_tl(t0, t0, mask);
1788 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1789 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1793 if (unlikely(Rc(ctx->opcode) != 0))
1794 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1796 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1799 /*** Integer shift ***/
1802 static void gen_slw(DisasContext *ctx)
1806 t0 = tcg_temp_new();
1807 /* AND rS with a mask that is 0 when rB >= 0x20 */
1808 #if defined(TARGET_PPC64)
1809 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1810 tcg_gen_sari_tl(t0, t0, 0x3f);
1812 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1813 tcg_gen_sari_tl(t0, t0, 0x1f);
1815 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1816 t1 = tcg_temp_new();
1817 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1818 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1821 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1822 if (unlikely(Rc(ctx->opcode) != 0))
1823 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1827 static void gen_sraw(DisasContext *ctx)
1829 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
1830 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1831 if (unlikely(Rc(ctx->opcode) != 0))
1832 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1835 /* srawi & srawi. */
1836 static void gen_srawi(DisasContext *ctx)
1838 int sh = SH(ctx->opcode);
1839 TCGv dst = cpu_gpr[rA(ctx->opcode)];
1840 TCGv src = cpu_gpr[rS(ctx->opcode)];
1842 tcg_gen_mov_tl(dst, src);
1843 tcg_gen_movi_tl(cpu_ca, 0);
1846 tcg_gen_ext32s_tl(dst, src);
1847 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
1848 t0 = tcg_temp_new();
1849 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
1850 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
1852 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
1853 tcg_gen_sari_tl(dst, dst, sh);
1855 if (unlikely(Rc(ctx->opcode) != 0)) {
1856 gen_set_Rc0(ctx, dst);
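/* srawi sets CA only when the source is negative and at least one 1 bit is
 * shifted out, so that software can adjust the result to a round-to-zero
 * divide.  E.g. srawi of 0xFFFFFFF5 (-11) by 1 gives -6 with CA = 1, while
 * srawi of 11 by 1 gives 5 with CA = 0.
 */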
1861 static void gen_srw(DisasContext *ctx)
1865 t0 = tcg_temp_new();
1866 /* AND rS with a mask that is 0 when rB >= 0x20 */
1867 #if defined(TARGET_PPC64)
1868 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1869 tcg_gen_sari_tl(t0, t0, 0x3f);
1871 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1872 tcg_gen_sari_tl(t0, t0, 0x1f);
1874 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1875 tcg_gen_ext32u_tl(t0, t0);
1876 t1 = tcg_temp_new();
1877 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1878 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1881 if (unlikely(Rc(ctx->opcode) != 0))
1882 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1885 #if defined(TARGET_PPC64)
1887 static void gen_sld(DisasContext *ctx)
1891 t0 = tcg_temp_new();
1892 /* AND rS with a mask that is 0 when rB >= 0x40 */
1893 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1894 tcg_gen_sari_tl(t0, t0, 0x3f);
1895 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1896 t1 = tcg_temp_new();
1897 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1898 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1901 if (unlikely(Rc(ctx->opcode) != 0))
1902 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1906 static void gen_srad(DisasContext *ctx)
1908 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
1909 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1910 if (unlikely(Rc(ctx->opcode) != 0))
1911 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1913 /* sradi & sradi. */
1914 static inline void gen_sradi(DisasContext *ctx, int n)
1916 int sh = SH(ctx->opcode) + (n << 5);
1917 TCGv dst = cpu_gpr[rA(ctx->opcode)];
1918 TCGv src = cpu_gpr[rS(ctx->opcode)];
1920 tcg_gen_mov_tl(dst, src);
1921 tcg_gen_movi_tl(cpu_ca, 0);
1924 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
1925 t0 = tcg_temp_new();
1926 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
1927 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
1929 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
1930 tcg_gen_sari_tl(dst, src, sh);
1932 if (unlikely(Rc(ctx->opcode) != 0)) {
1933 gen_set_Rc0(ctx, dst);
1937 static void gen_sradi0(DisasContext *ctx)
1942 static void gen_sradi1(DisasContext *ctx)
1948 static void gen_srd(DisasContext *ctx)
1952 t0 = tcg_temp_new();
1953 /* AND rS with a mask that is 0 when rB >= 0x40 */
1954 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1955 tcg_gen_sari_tl(t0, t0, 0x3f);
1956 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1957 t1 = tcg_temp_new();
1958 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1959 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1962 if (unlikely(Rc(ctx->opcode) != 0))
1963 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1967 /*** Floating-Point arithmetic ***/
1968 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1969 static void gen_f##name(DisasContext *ctx) \
1971 if (unlikely(!ctx->fpu_enabled)) { \
1972 gen_exception(ctx, POWERPC_EXCP_FPU); \
1975 /* NIP cannot be restored if the memory exception comes from a helper */ \
1976 gen_update_nip(ctx, ctx->nip - 4); \
1977 gen_reset_fpstatus(); \
1978 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1979 cpu_fpr[rA(ctx->opcode)], \
1980 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
1982 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1983 cpu_fpr[rD(ctx->opcode)]); \
1985 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
1986 Rc(ctx->opcode) != 0); \
1989 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1990 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1991 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
1993 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1994 static void gen_f##name(DisasContext *ctx) \
1996 if (unlikely(!ctx->fpu_enabled)) { \
1997 gen_exception(ctx, POWERPC_EXCP_FPU); \
2000 /* NIP cannot be restored if the memory exception comes from a helper */ \
2001 gen_update_nip(ctx, ctx->nip - 4); \
2002 gen_reset_fpstatus(); \
2003 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2004 cpu_fpr[rA(ctx->opcode)], \
2005 cpu_fpr[rB(ctx->opcode)]); \
2007 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2008 cpu_fpr[rD(ctx->opcode)]); \
2010 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2011 set_fprf, Rc(ctx->opcode) != 0); \
2013 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2014 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2015 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2017 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2018 static void gen_f##name(DisasContext *ctx) \
2020 if (unlikely(!ctx->fpu_enabled)) { \
2021 gen_exception(ctx, POWERPC_EXCP_FPU); \
2024 /* NIP cannot be restored if the memory exception comes from a helper */ \
2025 gen_update_nip(ctx, ctx->nip - 4); \
2026 gen_reset_fpstatus(); \
2027 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2028 cpu_fpr[rA(ctx->opcode)], \
2029 cpu_fpr[rC(ctx->opcode)]); \
2031 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2032 cpu_fpr[rD(ctx->opcode)]); \
2034 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2035 set_fprf, Rc(ctx->opcode) != 0); \
2037 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2038 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2039 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2041 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2042 static void gen_f##name(DisasContext *ctx) \
2044 if (unlikely(!ctx->fpu_enabled)) { \
2045 gen_exception(ctx, POWERPC_EXCP_FPU); \
2048 /* NIP cannot be restored if the memory exception comes from a helper */ \
2049 gen_update_nip(ctx, ctx->nip - 4); \
2050 gen_reset_fpstatus(); \
2051 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2052 cpu_fpr[rB(ctx->opcode)]); \
2053 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2054 set_fprf, Rc(ctx->opcode) != 0); \
2057 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2058 static void gen_f##name(DisasContext *ctx) \
2060 if (unlikely(!ctx->fpu_enabled)) { \
2061 gen_exception(ctx, POWERPC_EXCP_FPU); \
2064 /* NIP cannot be restored if the memory exception comes from a helper */ \
2065 gen_update_nip(ctx, ctx->nip - 4); \
2066 gen_reset_fpstatus(); \
2067 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2068 cpu_fpr[rB(ctx->opcode)]); \
2069 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2070 set_fprf, Rc(ctx->opcode) != 0); \
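/* Each GEN_FLOAT_* invocation below expands to a pair of handlers: the
 * 0x3F (double-precision) form and a 0x3B form that additionally rounds
 * the result to single precision with gen_helper_frsp; e.g.
 * GEN_FLOAT_AB(add, ...) defines both gen_fadd() and gen_fadds().
 */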
2074 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2076 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2078 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2081 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2084 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2087 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2090 static void gen_frsqrtes(DisasContext *ctx)
2092 if (unlikely(!ctx->fpu_enabled)) {
2093 gen_exception(ctx, POWERPC_EXCP_FPU);
2096 /* NIP cannot be restored if the memory exception comes from a helper */
2097 gen_update_nip(ctx, ctx->nip - 4);
2098 gen_reset_fpstatus();
2099 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
2100 cpu_fpr[rB(ctx->opcode)]);
2101 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2102 cpu_fpr[rD(ctx->opcode)]);
2103 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2107 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2109 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2113 static void gen_fsqrt(DisasContext *ctx)
2115 if (unlikely(!ctx->fpu_enabled)) {
2116 gen_exception(ctx, POWERPC_EXCP_FPU);
2119 /* NIP cannot be restored if the memory exception comes from a helper */
2120 gen_update_nip(ctx, ctx->nip - 4);
2121 gen_reset_fpstatus();
2122 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
2123 cpu_fpr[rB(ctx->opcode)]);
2124 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2127 static void gen_fsqrts(DisasContext *ctx)
2129 if (unlikely(!ctx->fpu_enabled)) {
2130 gen_exception(ctx, POWERPC_EXCP_FPU);
2133 /* NIP cannot be restored if the memory exception comes from a helper */
2134 gen_update_nip(ctx, ctx->nip - 4);
2135 gen_reset_fpstatus();
2136 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
2137 cpu_fpr[rB(ctx->opcode)]);
2138 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2139 cpu_fpr[rD(ctx->opcode)]);
2140 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2143 /*** Floating-Point multiply-and-add ***/
2144 /* fmadd - fmadds */
2145 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2146 /* fmsub - fmsubs */
2147 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2148 /* fnmadd - fnmadds */
2149 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2150 /* fnmsub - fnmsubs */
2151 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2153 /*** Floating-Point round & convert ***/
2155 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2157 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2159 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2160 #if defined(TARGET_PPC64)
2162 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2164 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2166 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2170 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2172 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2174 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2176 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2178 /*** Floating-Point compare ***/
2181 static void gen_fcmpo(DisasContext *ctx)
2184 if (unlikely(!ctx->fpu_enabled)) {
2185 gen_exception(ctx, POWERPC_EXCP_FPU);
2188 /* NIP cannot be restored if the memory exception comes from a helper */
2189 gen_update_nip(ctx, ctx->nip - 4);
2190 gen_reset_fpstatus();
2191 crf = tcg_const_i32(crfD(ctx->opcode));
2192 gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
2193 cpu_fpr[rB(ctx->opcode)], crf);
2194 tcg_temp_free_i32(crf);
2195 gen_helper_float_check_status(cpu_env);
2199 static void gen_fcmpu(DisasContext *ctx)
2202 if (unlikely(!ctx->fpu_enabled)) {
2203 gen_exception(ctx, POWERPC_EXCP_FPU);
2206 /* NIP cannot be restored if the memory exception comes from a helper */
2207 gen_update_nip(ctx, ctx->nip - 4);
2208 gen_reset_fpstatus();
2209 crf = tcg_const_i32(crfD(ctx->opcode));
2210 gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
2211 cpu_fpr[rB(ctx->opcode)], crf);
2212 tcg_temp_free_i32(crf);
2213 gen_helper_float_check_status(cpu_env);
2216 /*** Floating-point move ***/
2218 /* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
2219 static void gen_fabs(DisasContext *ctx)
2221 if (unlikely(!ctx->fpu_enabled)) {
2222 gen_exception(ctx, POWERPC_EXCP_FPU);
2225 tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2227 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2231 /* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
2232 static void gen_fmr(DisasContext *ctx)
2234 if (unlikely(!ctx->fpu_enabled)) {
2235 gen_exception(ctx, POWERPC_EXCP_FPU);
2238 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2239 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2243 /* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
2244 static void gen_fnabs(DisasContext *ctx)
2246 if (unlikely(!ctx->fpu_enabled)) {
2247 gen_exception(ctx, POWERPC_EXCP_FPU);
2250 tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2252 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2256 /* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
2257 static void gen_fneg(DisasContext *ctx)
2259 if (unlikely(!ctx->fpu_enabled)) {
2260 gen_exception(ctx, POWERPC_EXCP_FPU);
2263 tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2265 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2268 /* fcpsgn: PowerPC 2.05 specification */
2269 /* XXX: beware that fcpsgn never checks for NaNs nor updates FPSCR */
2270 static void gen_fcpsgn(DisasContext *ctx)
2272 if (unlikely(!ctx->fpu_enabled)) {
2273 gen_exception(ctx, POWERPC_EXCP_FPU);
2276 tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
2277 cpu_fpr[rB(ctx->opcode)], 0, 63);
2278 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2281 /*** Floating-Point status & ctrl register ***/
2284 static void gen_mcrfs(DisasContext *ctx)
2286 TCGv tmp = tcg_temp_new();
2289 if (unlikely(!ctx->fpu_enabled)) {
2290 gen_exception(ctx, POWERPC_EXCP_FPU);
2293 bfa = 4 * (7 - crfS(ctx->opcode));
2294 tcg_gen_shri_tl(tmp, cpu_fpscr, bfa);
2295 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
2297 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2298 tcg_gen_andi_tl(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2302 static void gen_mffs(DisasContext *ctx)
2304 if (unlikely(!ctx->fpu_enabled)) {
2305 gen_exception(ctx, POWERPC_EXCP_FPU);
2308 gen_reset_fpstatus();
2309 tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2310 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2314 static void gen_mtfsb0(DisasContext *ctx)
2318 if (unlikely(!ctx->fpu_enabled)) {
2319 gen_exception(ctx, POWERPC_EXCP_FPU);
2322 crb = 31 - crbD(ctx->opcode);
2323 gen_reset_fpstatus();
2324 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2326 /* NIP cannot be restored if the memory exception comes from a helper */
2327 gen_update_nip(ctx, ctx->nip - 4);
2328 t0 = tcg_const_i32(crb);
2329 gen_helper_fpscr_clrbit(cpu_env, t0);
2330 tcg_temp_free_i32(t0);
2332 if (unlikely(Rc(ctx->opcode) != 0)) {
2333 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2334 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2339 static void gen_mtfsb1(DisasContext *ctx)
2343 if (unlikely(!ctx->fpu_enabled)) {
2344 gen_exception(ctx, POWERPC_EXCP_FPU);
2347 crb = 31 - crbD(ctx->opcode);
2348 gen_reset_fpstatus();
2349 /* XXX: we pretend we can only do IEEE floating-point computations */
2350 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2352 /* NIP cannot be restored if the memory exception comes from a helper */
2353 gen_update_nip(ctx, ctx->nip - 4);
2354 t0 = tcg_const_i32(crb);
2355 gen_helper_fpscr_setbit(cpu_env, t0);
2356 tcg_temp_free_i32(t0);
2358 if (unlikely(Rc(ctx->opcode) != 0)) {
2359 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2360 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2362 /* We can raise a deferred exception */
2363 gen_helper_float_check_status(cpu_env);
2367 static void gen_mtfsf(DisasContext *ctx)
2372 if (unlikely(!ctx->fpu_enabled)) {
2373 gen_exception(ctx, POWERPC_EXCP_FPU);
2376 flm = FPFLM(ctx->opcode);
2377 l = FPL(ctx->opcode);
2378 w = FPW(ctx->opcode);
2379 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
2380 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2383 /* NIP cannot be restored if the memory exception comes from a helper */
2384 gen_update_nip(ctx, ctx->nip - 4);
2385 gen_reset_fpstatus();
2387 t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
2389 t0 = tcg_const_i32(flm << (w * 8));
2391 gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
2392 tcg_temp_free_i32(t0);
2393 if (unlikely(Rc(ctx->opcode) != 0)) {
2394 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2395 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2397 /* We can raise a deferred exception */
2398 gen_helper_float_check_status(cpu_env);
2402 static void gen_mtfsfi(DisasContext *ctx)
2408 if (unlikely(!ctx->fpu_enabled)) {
2409 gen_exception(ctx, POWERPC_EXCP_FPU);
2412 w = FPW(ctx->opcode);
2413 bf = FPBF(ctx->opcode);
2414 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
2415 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2418 sh = (8 * w) + 7 - bf;
2419 /* NIP cannot be restored if the memory exception comes from a helper */
2420 gen_update_nip(ctx, ctx->nip - 4);
2421 gen_reset_fpstatus();
2422 t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
2423 t1 = tcg_const_i32(1 << sh);
2424 gen_helper_store_fpscr(cpu_env, t0, t1);
2425 tcg_temp_free_i64(t0);
2426 tcg_temp_free_i32(t1);
2427 if (unlikely(Rc(ctx->opcode) != 0)) {
2428 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2429 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2431 /* We can raise a deferred exception */
2432 gen_helper_float_check_status(cpu_env);
2435 /*** Addressing modes ***/
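/*
 * All the EA helpers below share two conventions: rA == 0 selects the
 * literal value 0 rather than GPR0, and in NARROW_MODE (32-bit
 * addressing) the computed address is truncated to 32 bits with
 * tcg_gen_ext32u_tl.  For example, "lwz rD, 8(0)" loads from EA = 8,
 * while "lwz rD, 8(r3)" in 32-bit mode loads from (r3 + 8) & 0xffffffff.
 */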
2436 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2437 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2440 target_long simm = SIMM(ctx->opcode);
2443 if (rA(ctx->opcode) == 0) {
2444 if (NARROW_MODE(ctx)) {
2445 simm = (uint32_t)simm;
2447 tcg_gen_movi_tl(EA, simm);
2448 } else if (likely(simm != 0)) {
2449 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2450 if (NARROW_MODE(ctx)) {
2451 tcg_gen_ext32u_tl(EA, EA);
2454 if (NARROW_MODE(ctx)) {
2455 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2457 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2462 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2464 if (rA(ctx->opcode) == 0) {
2465 if (NARROW_MODE(ctx)) {
2466 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2468 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2471 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2472 if (NARROW_MODE(ctx)) {
2473 tcg_gen_ext32u_tl(EA, EA);
2478 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2480 if (rA(ctx->opcode) == 0) {
2481 tcg_gen_movi_tl(EA, 0);
2482 } else if (NARROW_MODE(ctx)) {
2483 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2485 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2489 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2492 tcg_gen_addi_tl(ret, arg1, val);
2493 if (NARROW_MODE(ctx)) {
2494 tcg_gen_ext32u_tl(ret, ret);
2498 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2500 int l1 = gen_new_label();
2501 TCGv t0 = tcg_temp_new();
2503 /* NIP cannot be restored if the memory exception comes from a helper */
2504 gen_update_nip(ctx, ctx->nip - 4);
2505 tcg_gen_andi_tl(t0, EA, mask);
2506 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2507 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2508 t2 = tcg_const_i32(0);
2509 gen_helper_raise_exception_err(cpu_env, t1, t2);
2510 tcg_temp_free_i32(t1);
2511 tcg_temp_free_i32(t2);
2516 /*** Integer load ***/
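/*
 * The gen_qemu_ld* and gen_qemu_st* wrappers below centralise the
 * endianness handling: the underlying tcg_gen_qemu_* ops access memory
 * in the guest's default big-endian order, so when the CPU runs in
 * little-endian mode (ctx->le_mode) the value is byte-swapped around
 * the memory operation; sign-extending loads swap first and then
 * re-extend the result.
 */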
2517 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2519 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2522 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2524 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2527 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2529 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2530 if (unlikely(ctx->le_mode)) {
2531 tcg_gen_bswap16_tl(arg1, arg1);
2535 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2537 if (unlikely(ctx->le_mode)) {
2538 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2539 tcg_gen_bswap16_tl(arg1, arg1);
2540 tcg_gen_ext16s_tl(arg1, arg1);
2542 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2546 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2548 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2549 if (unlikely(ctx->le_mode)) {
2550 tcg_gen_bswap32_tl(arg1, arg1);
2554 #if defined(TARGET_PPC64)
2555 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2557 if (unlikely(ctx->le_mode)) {
2558 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2559 tcg_gen_bswap32_tl(arg1, arg1);
2560 tcg_gen_ext32s_tl(arg1, arg1);
2562 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2566 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2568 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2569 if (unlikely(ctx->le_mode)) {
2570 tcg_gen_bswap64_i64(arg1, arg1);
2574 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2576 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2579 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2581 if (unlikely(ctx->le_mode)) {
2582 TCGv t0 = tcg_temp_new();
2583 tcg_gen_ext16u_tl(t0, arg1);
2584 tcg_gen_bswap16_tl(t0, t0);
2585 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2588 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2592 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2594 if (unlikely(ctx->le_mode)) {
2595 TCGv t0 = tcg_temp_new();
2596 tcg_gen_ext32u_tl(t0, arg1);
2597 tcg_gen_bswap32_tl(t0, t0);
2598 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2601 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2605 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2607 if (unlikely(ctx->le_mode)) {
2608 TCGv_i64 t0 = tcg_temp_new_i64();
2609 tcg_gen_bswap64_i64(t0, arg1);
2610 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2611 tcg_temp_free_i64(t0);
2613 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2616 #define GEN_LD(name, ldop, opc, type) \
2617 static void glue(gen_, name)(DisasContext *ctx) \
2620 gen_set_access_type(ctx, ACCESS_INT); \
2621 EA = tcg_temp_new(); \
2622 gen_addr_imm_index(ctx, EA, 0); \
2623 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2624 tcg_temp_free(EA); \
2627 #define GEN_LDU(name, ldop, opc, type) \
2628 static void glue(gen_, name##u)(DisasContext *ctx) \
2631 if (unlikely(rA(ctx->opcode) == 0 || \
2632 rA(ctx->opcode) == rD(ctx->opcode))) { \
2633 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2636 gen_set_access_type(ctx, ACCESS_INT); \
2637 EA = tcg_temp_new(); \
2638 if (type == PPC_64B) \
2639 gen_addr_imm_index(ctx, EA, 0x03); \
2641 gen_addr_imm_index(ctx, EA, 0); \
2642 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2643 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2644 tcg_temp_free(EA); \
2647 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2648 static void glue(gen_, name##ux)(DisasContext *ctx) \
2651 if (unlikely(rA(ctx->opcode) == 0 || \
2652 rA(ctx->opcode) == rD(ctx->opcode))) { \
2653 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2656 gen_set_access_type(ctx, ACCESS_INT); \
2657 EA = tcg_temp_new(); \
2658 gen_addr_reg_index(ctx, EA); \
2659 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2660 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2661 tcg_temp_free(EA); \
2664 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
2665 static void glue(gen_, name##x)(DisasContext *ctx) \
2668 gen_set_access_type(ctx, ACCESS_INT); \
2669 EA = tcg_temp_new(); \
2670 gen_addr_reg_index(ctx, EA); \
2671 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2672 tcg_temp_free(EA); \
2674 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2675 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE)
2677 #define GEN_LDS(name, ldop, op, type) \
2678 GEN_LD(name, ldop, op | 0x20, type); \
2679 GEN_LDU(name, ldop, op | 0x21, type); \
2680 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2681 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
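/*
 * Each GEN_LDS() use below therefore expands into the four classic
 * forms of a PowerPC load.  For instance GEN_LDS(lbz, ld8u, ...)
 * produces gen_lbz, gen_lbzu, gen_lbzux and gen_lbzx, all sharing the
 * gen_qemu_ld8u access routine; the update forms additionally write the
 * final EA back into rA and reject rA == 0 or rA == rD.
 */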
2683 /* lbz lbzu lbzux lbzx */
2684 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2685 /* lha lhau lhaux lhax */
2686 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2687 /* lhz lhzu lhzux lhzx */
2688 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2689 /* lwz lwzu lwzux lwzx */
2690 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2691 #if defined(TARGET_PPC64)
2693 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2695 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2697 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2699 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2701 static void gen_ld(DisasContext *ctx)
2704 if (Rc(ctx->opcode)) {
2705 if (unlikely(rA(ctx->opcode) == 0 ||
2706 rA(ctx->opcode) == rD(ctx->opcode))) {
2707 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2711 gen_set_access_type(ctx, ACCESS_INT);
2712 EA = tcg_temp_new();
2713 gen_addr_imm_index(ctx, EA, 0x03);
2714 if (ctx->opcode & 0x02) {
2715 /* lwa (lwau is undefined) */
2716 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2719 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2721 if (Rc(ctx->opcode))
2722 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2727 static void gen_lq(DisasContext *ctx)
2729 #if defined(CONFIG_USER_ONLY)
2730 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2735 /* Restore CPU state */
2736 if (unlikely(ctx->mem_idx == 0)) {
2737 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2740 ra = rA(ctx->opcode);
2741 rd = rD(ctx->opcode);
2742 if (unlikely((rd & 1) || rd == ra)) {
2743 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2746 if (unlikely(ctx->le_mode)) {
2747 /* Little-endian mode is not handled */
2748 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2751 gen_set_access_type(ctx, ACCESS_INT);
2752 EA = tcg_temp_new();
2753 gen_addr_imm_index(ctx, EA, 0x0F);
2754 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2755 gen_addr_add(ctx, EA, EA, 8);
2756 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2762 /*** Integer store ***/
2763 #define GEN_ST(name, stop, opc, type) \
2764 static void glue(gen_, name)(DisasContext *ctx) \
2767 gen_set_access_type(ctx, ACCESS_INT); \
2768 EA = tcg_temp_new(); \
2769 gen_addr_imm_index(ctx, EA, 0); \
2770 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2771 tcg_temp_free(EA); \
2774 #define GEN_STU(name, stop, opc, type) \
2775 static void glue(gen_, stop##u)(DisasContext *ctx) \
2778 if (unlikely(rA(ctx->opcode) == 0)) { \
2779 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2782 gen_set_access_type(ctx, ACCESS_INT); \
2783 EA = tcg_temp_new(); \
2784 if (type == PPC_64B) \
2785 gen_addr_imm_index(ctx, EA, 0x03); \
2787 gen_addr_imm_index(ctx, EA, 0); \
2788 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2789 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2790 tcg_temp_free(EA); \
2793 #define GEN_STUX(name, stop, opc2, opc3, type) \
2794 static void glue(gen_, name##ux)(DisasContext *ctx) \
2797 if (unlikely(rA(ctx->opcode) == 0)) { \
2798 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2801 gen_set_access_type(ctx, ACCESS_INT); \
2802 EA = tcg_temp_new(); \
2803 gen_addr_reg_index(ctx, EA); \
2804 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2805 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2806 tcg_temp_free(EA); \
2809 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
2810 static void glue(gen_, name##x)(DisasContext *ctx) \
2813 gen_set_access_type(ctx, ACCESS_INT); \
2814 EA = tcg_temp_new(); \
2815 gen_addr_reg_index(ctx, EA); \
2816 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2817 tcg_temp_free(EA); \
2819 #define GEN_STX(name, stop, opc2, opc3, type) \
2820 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE)
2822 #define GEN_STS(name, stop, op, type) \
2823 GEN_ST(name, stop, op | 0x20, type); \
2824 GEN_STU(name, stop, op | 0x21, type); \
2825 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2826 GEN_STX(name, stop, 0x17, op | 0x00, type)
2828 /* stb stbu stbux stbx */
2829 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2830 /* sth sthu sthux sthx */
2831 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2832 /* stw stwu stwux stwx */
2833 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2834 #if defined(TARGET_PPC64)
2835 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2836 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2838 static void gen_std(DisasContext *ctx)
2843 rs = rS(ctx->opcode);
2844 if ((ctx->opcode & 0x3) == 0x2) {
2845 #if defined(CONFIG_USER_ONLY)
2846 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2849 if (unlikely(ctx->mem_idx == 0)) {
2850 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2853 if (unlikely(rs & 1)) {
2854 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2857 if (unlikely(ctx->le_mode)) {
2858 /* Little-endian mode is not handled */
2859 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2862 gen_set_access_type(ctx, ACCESS_INT);
2863 EA = tcg_temp_new();
2864 gen_addr_imm_index(ctx, EA, 0x03);
2865 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2866 gen_addr_add(ctx, EA, EA, 8);
2867 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2872 if (Rc(ctx->opcode)) {
2873 if (unlikely(rA(ctx->opcode) == 0)) {
2874 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2878 gen_set_access_type(ctx, ACCESS_INT);
2879 EA = tcg_temp_new();
2880 gen_addr_imm_index(ctx, EA, 0x03);
2881 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2882 if (Rc(ctx->opcode))
2883 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2888 /*** Integer load and store with byte reverse ***/
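/*
 * The byte-reversed accessors (lhbrx, lwbrx, sthbrx, stwbrx, ...) are
 * the mirror image of the helpers above: since the underlying TCG
 * memory op is big-endian, the extra bswap is applied when the CPU is
 * *not* in little-endian mode and skipped when it is.
 */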
2890 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2892 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2893 if (likely(!ctx->le_mode)) {
2894 tcg_gen_bswap16_tl(arg1, arg1);
2897 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2900 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2902 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2903 if (likely(!ctx->le_mode)) {
2904 tcg_gen_bswap32_tl(arg1, arg1);
2907 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2909 #if defined(TARGET_PPC64)
2911 static inline void gen_qemu_ld64ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2913 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2914 if (likely(!ctx->le_mode)) {
2915 tcg_gen_bswap64_tl(arg1, arg1);
2918 GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX);
2919 #endif /* TARGET_PPC64 */
2922 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2924 if (likely(!ctx->le_mode)) {
2925 TCGv t0 = tcg_temp_new();
2926 tcg_gen_ext16u_tl(t0, arg1);
2927 tcg_gen_bswap16_tl(t0, t0);
2928 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2931 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2934 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2937 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2939 if (likely(!ctx->le_mode)) {
2940 TCGv t0 = tcg_temp_new();
2941 tcg_gen_ext32u_tl(t0, arg1);
2942 tcg_gen_bswap32_tl(t0, t0);
2943 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2946 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2949 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2951 #if defined(TARGET_PPC64)
2953 static inline void gen_qemu_st64r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2955 if (likely(!ctx->le_mode)) {
2956 TCGv t0 = tcg_temp_new();
2957 tcg_gen_bswap64_tl(t0, arg1);
2958 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2961 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2964 GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX);
2965 #endif /* TARGET_PPC64 */
2967 /*** Integer load and store multiple ***/
2970 static void gen_lmw(DisasContext *ctx)
2974 gen_set_access_type(ctx, ACCESS_INT);
2975 /* NIP cannot be restored if the memory exception comes from a helper */
2976 gen_update_nip(ctx, ctx->nip - 4);
2977 t0 = tcg_temp_new();
2978 t1 = tcg_const_i32(rD(ctx->opcode));
2979 gen_addr_imm_index(ctx, t0, 0);
2980 gen_helper_lmw(cpu_env, t0, t1);
2982 tcg_temp_free_i32(t1);
2986 static void gen_stmw(DisasContext *ctx)
2990 gen_set_access_type(ctx, ACCESS_INT);
2991 /* NIP cannot be restored if the memory exception comes from a helper */
2992 gen_update_nip(ctx, ctx->nip - 4);
2993 t0 = tcg_temp_new();
2994 t1 = tcg_const_i32(rS(ctx->opcode));
2995 gen_addr_imm_index(ctx, t0, 0);
2996 gen_helper_stmw(cpu_env, t0, t1);
2998 tcg_temp_free_i32(t1);
3001 /*** Integer load and store strings ***/
3004 /* PowerPC32 specification says we must generate an exception if
3005 * rA is in the range of registers to be loaded.
3006 * On the other hand, IBM says this is valid, but rA won't be loaded.
3007 * For now, I'll follow the spec...
3009 static void gen_lswi(DisasContext *ctx)
3013 int nb = NB(ctx->opcode);
3014 int start = rD(ctx->opcode);
3015 int ra = rA(ctx->opcode);
3021 if (unlikely(((start + nr) > 32 &&
3022 start <= ra && (start + nr - 32) > ra) ||
3023 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
3024 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3027 gen_set_access_type(ctx, ACCESS_INT);
3028 /* NIP cannot be restored if the memory exception comes from a helper */
3029 gen_update_nip(ctx, ctx->nip - 4);
3030 t0 = tcg_temp_new();
3031 gen_addr_register(ctx, t0);
3032 t1 = tcg_const_i32(nb);
3033 t2 = tcg_const_i32(start);
3034 gen_helper_lsw(cpu_env, t0, t1, t2);
3036 tcg_temp_free_i32(t1);
3037 tcg_temp_free_i32(t2);
3041 static void gen_lswx(DisasContext *ctx)
3044 TCGv_i32 t1, t2, t3;
3045 gen_set_access_type(ctx, ACCESS_INT);
3046 /* NIP cannot be restored if the memory exception comes from a helper */
3047 gen_update_nip(ctx, ctx->nip - 4);
3048 t0 = tcg_temp_new();
3049 gen_addr_reg_index(ctx, t0);
3050 t1 = tcg_const_i32(rD(ctx->opcode));
3051 t2 = tcg_const_i32(rA(ctx->opcode));
3052 t3 = tcg_const_i32(rB(ctx->opcode));
3053 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3055 tcg_temp_free_i32(t1);
3056 tcg_temp_free_i32(t2);
3057 tcg_temp_free_i32(t3);
3061 static void gen_stswi(DisasContext *ctx)
3065 int nb = NB(ctx->opcode);
3066 gen_set_access_type(ctx, ACCESS_INT);
3067 /* NIP cannot be restored if the memory exception comes from a helper */
3068 gen_update_nip(ctx, ctx->nip - 4);
3069 t0 = tcg_temp_new();
3070 gen_addr_register(ctx, t0);
3073 t1 = tcg_const_i32(nb);
3074 t2 = tcg_const_i32(rS(ctx->opcode));
3075 gen_helper_stsw(cpu_env, t0, t1, t2);
3077 tcg_temp_free_i32(t1);
3078 tcg_temp_free_i32(t2);
3082 static void gen_stswx(DisasContext *ctx)
3086 gen_set_access_type(ctx, ACCESS_INT);
3087 /* NIP cannot be restored if the memory exception comes from a helper */
3088 gen_update_nip(ctx, ctx->nip - 4);
3089 t0 = tcg_temp_new();
3090 gen_addr_reg_index(ctx, t0);
3091 t1 = tcg_temp_new_i32();
3092 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3093 tcg_gen_andi_i32(t1, t1, 0x7F);
3094 t2 = tcg_const_i32(rS(ctx->opcode));
3095 gen_helper_stsw(cpu_env, t0, t1, t2);
3097 tcg_temp_free_i32(t1);
3098 tcg_temp_free_i32(t2);
3101 /*** Memory synchronisation ***/
3103 static void gen_eieio(DisasContext *ctx)
3108 static void gen_isync(DisasContext *ctx)
3110 gen_stop_exception(ctx);
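/*
 * Load-reserve / store-conditional emulation: lwarx (and ldarx below)
 * records the reservation address in cpu_reserve and the loaded value
 * in env->reserve_val.  stwcx. / stdcx. copy SO into CR0, compare the
 * store address with cpu_reserve and, on a match, set CR0[EQ] and
 * perform the store; in either case the reservation is then cleared by
 * writing -1 to cpu_reserve.  In user-only mode the conditional store
 * is instead deferred to the POWERPC_EXCP_STCX exception path so it can
 * be completed atomically outside generated code.
 */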
3114 static void gen_lwarx(DisasContext *ctx)
3117 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3118 gen_set_access_type(ctx, ACCESS_RES);
3119 t0 = tcg_temp_local_new();
3120 gen_addr_reg_index(ctx, t0);
3121 gen_check_align(ctx, t0, 0x03);
3122 gen_qemu_ld32u(ctx, gpr, t0);
3123 tcg_gen_mov_tl(cpu_reserve, t0);
3124 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3128 #if defined(CONFIG_USER_ONLY)
3129 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3132 TCGv t0 = tcg_temp_new();
3133 uint32_t save_exception = ctx->exception;
3135 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
3136 tcg_gen_movi_tl(t0, (size << 5) | reg);
3137 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
3139 gen_update_nip(ctx, ctx->nip-4);
3140 ctx->exception = POWERPC_EXCP_BRANCH;
3141 gen_exception(ctx, POWERPC_EXCP_STCX);
3142 ctx->exception = save_exception;
3147 static void gen_stwcx_(DisasContext *ctx)
3150 gen_set_access_type(ctx, ACCESS_RES);
3151 t0 = tcg_temp_local_new();
3152 gen_addr_reg_index(ctx, t0);
3153 gen_check_align(ctx, t0, 0x03);
3154 #if defined(CONFIG_USER_ONLY)
3155 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3160 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3161 l1 = gen_new_label();
3162 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3163 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3164 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3166 tcg_gen_movi_tl(cpu_reserve, -1);
3172 #if defined(TARGET_PPC64)
3174 static void gen_ldarx(DisasContext *ctx)
3177 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3178 gen_set_access_type(ctx, ACCESS_RES);
3179 t0 = tcg_temp_local_new();
3180 gen_addr_reg_index(ctx, t0);
3181 gen_check_align(ctx, t0, 0x07);
3182 gen_qemu_ld64(ctx, gpr, t0);
3183 tcg_gen_mov_tl(cpu_reserve, t0);
3184 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3189 static void gen_stdcx_(DisasContext *ctx)
3192 gen_set_access_type(ctx, ACCESS_RES);
3193 t0 = tcg_temp_local_new();
3194 gen_addr_reg_index(ctx, t0);
3195 gen_check_align(ctx, t0, 0x07);
3196 #if defined(CONFIG_USER_ONLY)
3197 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3201 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3202 l1 = gen_new_label();
3203 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3204 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3205 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3207 tcg_gen_movi_tl(cpu_reserve, -1);
3212 #endif /* defined(TARGET_PPC64) */
3215 static void gen_sync(DisasContext *ctx)
3220 static void gen_wait(DisasContext *ctx)
3222 TCGv_i32 t0 = tcg_temp_new_i32();
3223 tcg_gen_st_i32(t0, cpu_env,
3224 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3225 tcg_temp_free_i32(t0);
3226 /* Stop translation, as the CPU is supposed to sleep from now on */
3227 gen_exception_err(ctx, EXCP_HLT, 1);
3230 /*** Floating-point load ***/
3231 #define GEN_LDF(name, ldop, opc, type) \
3232 static void glue(gen_, name)(DisasContext *ctx) \
3235 if (unlikely(!ctx->fpu_enabled)) { \
3236 gen_exception(ctx, POWERPC_EXCP_FPU); \
3239 gen_set_access_type(ctx, ACCESS_FLOAT); \
3240 EA = tcg_temp_new(); \
3241 gen_addr_imm_index(ctx, EA, 0); \
3242 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3243 tcg_temp_free(EA); \
3246 #define GEN_LDUF(name, ldop, opc, type) \
3247 static void glue(gen_, name##u)(DisasContext *ctx) \
3250 if (unlikely(!ctx->fpu_enabled)) { \
3251 gen_exception(ctx, POWERPC_EXCP_FPU); \
3254 if (unlikely(rA(ctx->opcode) == 0)) { \
3255 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3258 gen_set_access_type(ctx, ACCESS_FLOAT); \
3259 EA = tcg_temp_new(); \
3260 gen_addr_imm_index(ctx, EA, 0); \
3261 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3262 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3263 tcg_temp_free(EA); \
3266 #define GEN_LDUXF(name, ldop, opc, type) \
3267 static void glue(gen_, name##ux)(DisasContext *ctx) \
3270 if (unlikely(!ctx->fpu_enabled)) { \
3271 gen_exception(ctx, POWERPC_EXCP_FPU); \
3274 if (unlikely(rA(ctx->opcode) == 0)) { \
3275 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3278 gen_set_access_type(ctx, ACCESS_FLOAT); \
3279 EA = tcg_temp_new(); \
3280 gen_addr_reg_index(ctx, EA); \
3281 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3282 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3283 tcg_temp_free(EA); \
3286 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3287 static void glue(gen_, name##x)(DisasContext *ctx) \
3290 if (unlikely(!ctx->fpu_enabled)) { \
3291 gen_exception(ctx, POWERPC_EXCP_FPU); \
3294 gen_set_access_type(ctx, ACCESS_FLOAT); \
3295 EA = tcg_temp_new(); \
3296 gen_addr_reg_index(ctx, EA); \
3297 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3298 tcg_temp_free(EA); \
3301 #define GEN_LDFS(name, ldop, op, type) \
3302 GEN_LDF(name, ldop, op | 0x20, type); \
3303 GEN_LDUF(name, ldop, op | 0x21, type); \
3304 GEN_LDUXF(name, ldop, op | 0x01, type); \
3305 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3307 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3309 TCGv t0 = tcg_temp_new();
3310 TCGv_i32 t1 = tcg_temp_new_i32();
3311 gen_qemu_ld32u(ctx, t0, arg2);
3312 tcg_gen_trunc_tl_i32(t1, t0);
3314 gen_helper_float32_to_float64(arg1, cpu_env, t1);
3315 tcg_temp_free_i32(t1);
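/*
 * lfs and friends keep single-precision values in the 64-bit FPRs in
 * double format: gen_qemu_ld32fs loads the 32-bit word and widens it
 * with the float32_to_float64 helper, while gen_qemu_st32fs further
 * down performs the reverse conversion on store.
 */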
3318 /* lfd lfdu lfdux lfdx */
3319 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3320 /* lfs lfsu lfsux lfsx */
3321 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3324 static void gen_lfdp(DisasContext *ctx)
3327 if (unlikely(!ctx->fpu_enabled)) {
3328 gen_exception(ctx, POWERPC_EXCP_FPU);
3331 gen_set_access_type(ctx, ACCESS_FLOAT);
3332 EA = tcg_temp_new();
3333 gen_addr_imm_index(ctx, EA, 0);
3334 if (unlikely(ctx->le_mode)) {
3335 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3336 tcg_gen_addi_tl(EA, EA, 8);
3337 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3339 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3340 tcg_gen_addi_tl(EA, EA, 8);
3341 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3347 static void gen_lfdpx(DisasContext *ctx)
3350 if (unlikely(!ctx->fpu_enabled)) {
3351 gen_exception(ctx, POWERPC_EXCP_FPU);
3354 gen_set_access_type(ctx, ACCESS_FLOAT);
3355 EA = tcg_temp_new();
3356 gen_addr_reg_index(ctx, EA);
3357 if (unlikely(ctx->le_mode)) {
3358 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3359 tcg_gen_addi_tl(EA, EA, 8);
3360 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3362 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3363 tcg_gen_addi_tl(EA, EA, 8);
3364 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3370 static void gen_lfiwax(DisasContext *ctx)
3374 if (unlikely(!ctx->fpu_enabled)) {
3375 gen_exception(ctx, POWERPC_EXCP_FPU);
3378 gen_set_access_type(ctx, ACCESS_FLOAT);
3379 EA = tcg_temp_new();
3380 t0 = tcg_temp_new();
3381 gen_addr_reg_index(ctx, EA);
3382 gen_qemu_ld32u(ctx, t0, EA);
3383 tcg_gen_ext_tl_i64(cpu_fpr[rD(ctx->opcode)], t0);
3384 tcg_gen_ext32s_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
3389 /*** Floating-point store ***/
3390 #define GEN_STF(name, stop, opc, type) \
3391 static void glue(gen_, name)(DisasContext *ctx) \
3394 if (unlikely(!ctx->fpu_enabled)) { \
3395 gen_exception(ctx, POWERPC_EXCP_FPU); \
3398 gen_set_access_type(ctx, ACCESS_FLOAT); \
3399 EA = tcg_temp_new(); \
3400 gen_addr_imm_index(ctx, EA, 0); \
3401 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3402 tcg_temp_free(EA); \
3405 #define GEN_STUF(name, stop, opc, type) \
3406 static void glue(gen_, name##u)(DisasContext *ctx) \
3409 if (unlikely(!ctx->fpu_enabled)) { \
3410 gen_exception(ctx, POWERPC_EXCP_FPU); \
3413 if (unlikely(rA(ctx->opcode) == 0)) { \
3414 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3417 gen_set_access_type(ctx, ACCESS_FLOAT); \
3418 EA = tcg_temp_new(); \
3419 gen_addr_imm_index(ctx, EA, 0); \
3420 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3421 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3422 tcg_temp_free(EA); \
3425 #define GEN_STUXF(name, stop, opc, type) \
3426 static void glue(gen_, name##ux)(DisasContext *ctx) \
3429 if (unlikely(!ctx->fpu_enabled)) { \
3430 gen_exception(ctx, POWERPC_EXCP_FPU); \
3433 if (unlikely(rA(ctx->opcode) == 0)) { \
3434 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3437 gen_set_access_type(ctx, ACCESS_FLOAT); \
3438 EA = tcg_temp_new(); \
3439 gen_addr_reg_index(ctx, EA); \
3440 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3441 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3442 tcg_temp_free(EA); \
3445 #define GEN_STXF(name, stop, opc2, opc3, type) \
3446 static void glue(gen_, name##x)(DisasContext *ctx) \
3449 if (unlikely(!ctx->fpu_enabled)) { \
3450 gen_exception(ctx, POWERPC_EXCP_FPU); \
3453 gen_set_access_type(ctx, ACCESS_FLOAT); \
3454 EA = tcg_temp_new(); \
3455 gen_addr_reg_index(ctx, EA); \
3456 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3457 tcg_temp_free(EA); \
3460 #define GEN_STFS(name, stop, op, type) \
3461 GEN_STF(name, stop, op | 0x20, type); \
3462 GEN_STUF(name, stop, op | 0x21, type); \
3463 GEN_STUXF(name, stop, op | 0x01, type); \
3464 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3466 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3468 TCGv_i32 t0 = tcg_temp_new_i32();
3469 TCGv t1 = tcg_temp_new();
3470 gen_helper_float64_to_float32(t0, cpu_env, arg1);
3471 tcg_gen_extu_i32_tl(t1, t0);
3472 tcg_temp_free_i32(t0);
3473 gen_qemu_st32(ctx, t1, arg2);
3477 /* stfd stfdu stfdux stfdx */
3478 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3479 /* stfs stfsu stfsux stfsx */
3480 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3483 static void gen_stfdp(DisasContext *ctx)
3486 if (unlikely(!ctx->fpu_enabled)) {
3487 gen_exception(ctx, POWERPC_EXCP_FPU);
3490 gen_set_access_type(ctx, ACCESS_FLOAT);
3491 EA = tcg_temp_new();
3492 gen_addr_imm_index(ctx, EA, 0);
3493 if (unlikely(ctx->le_mode)) {
3494 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3495 tcg_gen_addi_tl(EA, EA, 8);
3496 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3498 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3499 tcg_gen_addi_tl(EA, EA, 8);
3500 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3506 static void gen_stfdpx(DisasContext *ctx)
3509 if (unlikely(!ctx->fpu_enabled)) {
3510 gen_exception(ctx, POWERPC_EXCP_FPU);
3513 gen_set_access_type(ctx, ACCESS_FLOAT);
3514 EA = tcg_temp_new();
3515 gen_addr_reg_index(ctx, EA);
3516 if (unlikely(ctx->le_mode)) {
3517 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3518 tcg_gen_addi_tl(EA, EA, 8);
3519 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3521 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3522 tcg_gen_addi_tl(EA, EA, 8);
3523 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3529 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3531 TCGv t0 = tcg_temp_new();
3532 tcg_gen_trunc_i64_tl(t0, arg1);
3533 gen_qemu_st32(ctx, t0, arg2);
3537 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
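/*
 * Branch generation.  gen_goto_tb below uses QEMU's direct TB chaining:
 * when the destination lies in the same guest page as the current TB
 * and no single-stepping is active, the block exits through
 * tcg_gen_exit_tb((tcg_target_long)tb + n) so the jump can be patched
 * to link the two TBs; otherwise it stores the target into cpu_nip and
 * returns to the main loop, raising trace or debug exceptions as
 * required.
 */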
3539 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
3541 #if defined(TARGET_PPC64)
3543 tcg_gen_movi_tl(cpu_cfar, nip);
3548 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3550 TranslationBlock *tb;
3552 if (NARROW_MODE(ctx)) {
3553 dest = (uint32_t) dest;
3555 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3556 likely(!ctx->singlestep_enabled)) {
3558 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3559 tcg_gen_exit_tb((tcg_target_long)tb + n);
3561 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3562 if (unlikely(ctx->singlestep_enabled)) {
3563 if ((ctx->singlestep_enabled &
3564 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3565 (ctx->exception == POWERPC_EXCP_BRANCH ||
3566 ctx->exception == POWERPC_EXCP_TRACE)) {
3567 target_ulong tmp = ctx->nip;
3569 gen_exception(ctx, POWERPC_EXCP_TRACE);
3572 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3573 gen_debug_exception(ctx);
3580 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3582 if (NARROW_MODE(ctx)) {
3583 nip = (uint32_t)nip;
3585 tcg_gen_movi_tl(cpu_lr, nip);
3589 static void gen_b(DisasContext *ctx)
3591 target_ulong li, target;
3593 ctx->exception = POWERPC_EXCP_BRANCH;
3594 /* sign extend LI */
3595 li = LI(ctx->opcode);
3596 li = (li ^ 0x02000000) - 0x02000000;
3597 if (likely(AA(ctx->opcode) == 0)) {
3598 target = ctx->nip + li - 4;
3602 if (LK(ctx->opcode)) {
3603 gen_setlr(ctx, ctx->nip);
3605 gen_update_cfar(ctx, ctx->nip);
3606 gen_goto_tb(ctx, 0, target);
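/*
 * Conditional branches (bc, bcctr, bclr) all go through gen_bcond.  Two
 * optional tests are driven by the BO field: when (bo & 0x4) == 0 the
 * CTR is decremented and compared against zero, and when (bo & 0x10) == 0
 * the CR bit selected by BI is tested.  A failing test branches to l1,
 * skipping the taken-branch path so execution falls through to the next
 * instruction.
 */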
3613 static inline void gen_bcond(DisasContext *ctx, int type)
3615 uint32_t bo = BO(ctx->opcode);
3619 ctx->exception = POWERPC_EXCP_BRANCH;
3620 if (type == BCOND_LR || type == BCOND_CTR) {
3621 target = tcg_temp_local_new();
3622 if (type == BCOND_CTR)
3623 tcg_gen_mov_tl(target, cpu_ctr);
3625 tcg_gen_mov_tl(target, cpu_lr);
3627 TCGV_UNUSED(target);
3629 if (LK(ctx->opcode))
3630 gen_setlr(ctx, ctx->nip);
3631 l1 = gen_new_label();
3632 if ((bo & 0x4) == 0) {
3633 /* Decrement and test CTR */
3634 TCGv temp = tcg_temp_new();
3635 if (unlikely(type == BCOND_CTR)) {
3636 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3639 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3640 if (NARROW_MODE(ctx)) {
3641 tcg_gen_ext32u_tl(temp, cpu_ctr);
3643 tcg_gen_mov_tl(temp, cpu_ctr);
3646 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3648 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3650 tcg_temp_free(temp);
3652 if ((bo & 0x10) == 0) {
3654 uint32_t bi = BI(ctx->opcode);
3655 uint32_t mask = 1 << (3 - (bi & 0x03));
3656 TCGv_i32 temp = tcg_temp_new_i32();
3659 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3660 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3662 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3663 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3665 tcg_temp_free_i32(temp);
3667 gen_update_cfar(ctx, ctx->nip);
3668 if (type == BCOND_IM) {
3669 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3670 if (likely(AA(ctx->opcode) == 0)) {
3671 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3673 gen_goto_tb(ctx, 0, li);
3676 gen_goto_tb(ctx, 1, ctx->nip);
3678 if (NARROW_MODE(ctx)) {
3679 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3681 tcg_gen_andi_tl(cpu_nip, target, ~3);
3685 gen_update_nip(ctx, ctx->nip);
3690 static void gen_bc(DisasContext *ctx)
3692 gen_bcond(ctx, BCOND_IM);
3695 static void gen_bcctr(DisasContext *ctx)
3697 gen_bcond(ctx, BCOND_CTR);
3700 static void gen_bclr(DisasContext *ctx)
3702 gen_bcond(ctx, BCOND_LR);
3705 /*** Condition register logical ***/
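/*
 * The condition register is held as eight separate 4-bit TCG globals
 * (cpu_crf[0..7]), so a CR-logical instruction cannot simply mask one
 * 32-bit value.  GEN_CRLOGIC shifts the crbA and crbB bits to the
 * destination bit position within their fields, applies the TCG
 * operation, and merges the single result bit back into
 * cpu_crf[crbD >> 2] under ~bitmask.
 */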
3706 #define GEN_CRLOGIC(name, tcg_op, opc) \
3707 static void glue(gen_, name)(DisasContext *ctx) \
3712 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3713 t0 = tcg_temp_new_i32(); \
3715 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3717 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3719 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3720 t1 = tcg_temp_new_i32(); \
3721 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3723 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3725 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3727 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3728 tcg_op(t0, t0, t1); \
3729 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3730 tcg_gen_andi_i32(t0, t0, bitmask); \
3731 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3732 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3733 tcg_temp_free_i32(t0); \
3734 tcg_temp_free_i32(t1); \
3738 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3740 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3742 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3744 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3746 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3748 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3750 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3752 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3755 static void gen_mcrf(DisasContext *ctx)
3757 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3760 /*** System linkage ***/
3762 /* rfi (mem_idx only) */
3763 static void gen_rfi(DisasContext *ctx)
3765 #if defined(CONFIG_USER_ONLY)
3766 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3768 /* Restore CPU state */
3769 if (unlikely(!ctx->mem_idx)) {
3770 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3773 gen_update_cfar(ctx, ctx->nip);
3774 gen_helper_rfi(cpu_env);
3775 gen_sync_exception(ctx);
3779 #if defined(TARGET_PPC64)
3780 static void gen_rfid(DisasContext *ctx)
3782 #if defined(CONFIG_USER_ONLY)
3783 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3785 /* Restore CPU state */
3786 if (unlikely(!ctx->mem_idx)) {
3787 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3790 gen_update_cfar(ctx, ctx->nip);
3791 gen_helper_rfid(cpu_env);
3792 gen_sync_exception(ctx);
3796 static void gen_hrfid(DisasContext *ctx)
3798 #if defined(CONFIG_USER_ONLY)
3799 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3801 /* Restore CPU state */
3802 if (unlikely(ctx->mem_idx <= 1)) {
3803 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3806 gen_helper_hrfid(cpu_env);
3807 gen_sync_exception(ctx);
3813 #if defined(CONFIG_USER_ONLY)
3814 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3816 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3818 static void gen_sc(DisasContext *ctx)
3822 lev = (ctx->opcode >> 5) & 0x7F;
3823 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3829 static void gen_tw(DisasContext *ctx)
3831 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3832 /* Update the nip since this might generate a trap exception */
3833 gen_update_nip(ctx, ctx->nip);
3834 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3836 tcg_temp_free_i32(t0);
3840 static void gen_twi(DisasContext *ctx)
3842 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3843 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3844 /* Update the nip since this might generate a trap exception */
3845 gen_update_nip(ctx, ctx->nip);
3846 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3848 tcg_temp_free_i32(t1);
3851 #if defined(TARGET_PPC64)
3853 static void gen_td(DisasContext *ctx)
3855 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3856 /* Update the nip since this might generate a trap exception */
3857 gen_update_nip(ctx, ctx->nip);
3858 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3860 tcg_temp_free_i32(t0);
3864 static void gen_tdi(DisasContext *ctx)
3866 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3867 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3868 /* Update the nip since this might generate a trap exception */
3869 gen_update_nip(ctx, ctx->nip);
3870 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3872 tcg_temp_free_i32(t1);
3876 /*** Processor control ***/
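/*
 * XER is kept split in a similar way: cpu_xer holds the register with
 * SO, OV and CA cleared, while those three bits live in the dedicated
 * globals cpu_so, cpu_ov and cpu_ca.  gen_read_xer reassembles the
 * architectural value by shifting them back to positions XER_SO, XER_OV
 * and XER_CA, and gen_write_xer performs the matching split whenever
 * XER is written.
 */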
3878 static void gen_read_xer(TCGv dst)
3880 TCGv t0 = tcg_temp_new();
3881 TCGv t1 = tcg_temp_new();
3882 TCGv t2 = tcg_temp_new();
3883 tcg_gen_mov_tl(dst, cpu_xer);
3884 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
3885 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
3886 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
3887 tcg_gen_or_tl(t0, t0, t1);
3888 tcg_gen_or_tl(dst, dst, t2);
3889 tcg_gen_or_tl(dst, dst, t0);
3895 static void gen_write_xer(TCGv src)
3897 tcg_gen_andi_tl(cpu_xer, src,
3898 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
3899 tcg_gen_shri_tl(cpu_so, src, XER_SO);
3900 tcg_gen_shri_tl(cpu_ov, src, XER_OV);
3901 tcg_gen_shri_tl(cpu_ca, src, XER_CA);
3902 tcg_gen_andi_tl(cpu_so, cpu_so, 1);
3903 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
3904 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
3908 static void gen_mcrxr(DisasContext *ctx)
3910 TCGv_i32 t0 = tcg_temp_new_i32();
3911 TCGv_i32 t1 = tcg_temp_new_i32();
3912 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
3914 tcg_gen_trunc_tl_i32(t0, cpu_so);
3915 tcg_gen_trunc_tl_i32(t1, cpu_ov);
3916 tcg_gen_trunc_tl_i32(dst, cpu_ca);
3917 tcg_gen_shri_i32(t0, t0, 2);
3918 tcg_gen_shri_i32(t1, t1, 1);
3919 tcg_gen_or_i32(dst, dst, t0);
3920 tcg_gen_or_i32(dst, dst, t1);
3921 tcg_temp_free_i32(t0);
3922 tcg_temp_free_i32(t1);
3924 tcg_gen_movi_tl(cpu_so, 0);
3925 tcg_gen_movi_tl(cpu_ov, 0);
3926 tcg_gen_movi_tl(cpu_ca, 0);
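/*
 * mfcr and the single-field mfocrf form share one handler: when opcode
 * bit 0x00100000 is set and CRM has exactly one bit set
 * ((crm & (crm - 1)) == 0), only the selected cpu_crf field is copied
 * and shifted into place; otherwise the full CR image is rebuilt by
 * concatenating all eight 4-bit fields.  gen_mtcrf below applies the
 * same CRM decoding in the opposite direction.
 */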
3930 static void gen_mfcr(DisasContext *ctx)
3934 if (likely(ctx->opcode & 0x00100000)) {
3935 crm = CRM(ctx->opcode);
3936 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3938 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3939 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3940 cpu_gpr[rD(ctx->opcode)], crn * 4);
3943 TCGv_i32 t0 = tcg_temp_new_i32();
3944 tcg_gen_mov_i32(t0, cpu_crf[0]);
3945 tcg_gen_shli_i32(t0, t0, 4);
3946 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3947 tcg_gen_shli_i32(t0, t0, 4);
3948 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3949 tcg_gen_shli_i32(t0, t0, 4);
3950 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3951 tcg_gen_shli_i32(t0, t0, 4);
3952 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3953 tcg_gen_shli_i32(t0, t0, 4);
3954 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3955 tcg_gen_shli_i32(t0, t0, 4);
3956 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3957 tcg_gen_shli_i32(t0, t0, 4);
3958 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3959 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3960 tcg_temp_free_i32(t0);
3965 static void gen_mfmsr(DisasContext *ctx)
3967 #if defined(CONFIG_USER_ONLY)
3968 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3970 if (unlikely(!ctx->mem_idx)) {
3971 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3974 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3978 static void spr_noaccess(void *opaque, int gprn, int sprn)
3981 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3982 printf("ERROR: try to access SPR %d !\n", sprn);
3985 #define SPR_NOACCESS (&spr_noaccess)
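/*
 * SPR accesses go through per-SPR callback tables.  The privilege level
 * encoded in ctx->mem_idx selects the table: hea_read / hea_write for
 * hypervisor state (mem_idx == 2), oea_read / oea_write for supervisor,
 * and uea_read / uea_write for user mode.  A NULL entry means the SPR
 * does not exist (invalid-SPR exception), while SPR_NOACCESS marks an
 * SPR that exists but is privileged at the current level.
 */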
3988 static inline void gen_op_mfspr(DisasContext *ctx)
3990 void (*read_cb)(void *opaque, int gprn, int sprn);
3991 uint32_t sprn = SPR(ctx->opcode);
3993 #if !defined(CONFIG_USER_ONLY)
3994 if (ctx->mem_idx == 2)
3995 read_cb = ctx->spr_cb[sprn].hea_read;
3996 else if (ctx->mem_idx)
3997 read_cb = ctx->spr_cb[sprn].oea_read;
4000 read_cb = ctx->spr_cb[sprn].uea_read;
4001 if (likely(read_cb != NULL)) {
4002 if (likely(read_cb != SPR_NOACCESS)) {
4003 (*read_cb)(ctx, rD(ctx->opcode), sprn);
4005 /* Privilege exception */
4006 /* This is a hack to avoid warnings when running Linux:
4007 * this OS breaks the PowerPC virtualisation model,
4008 * allowing userland applications to read the PVR
4010 if (sprn != SPR_PVR) {
4011 qemu_log("Trying to read privileged spr %d %03x at "
4012 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
4013 printf("Trying to read privileged spr %d %03x at "
4014 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
4016 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4020 qemu_log("Trying to read invalid spr %d %03x at "
4021 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
4022 printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
4023 sprn, sprn, ctx->nip);
4024 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4028 static void gen_mfspr(DisasContext *ctx)
4034 static void gen_mftb(DisasContext *ctx)
4040 static void gen_mtcrf(DisasContext *ctx)
4044 crm = CRM(ctx->opcode);
4045 if (likely((ctx->opcode & 0x00100000))) {
4046 if (crm && ((crm & (crm - 1)) == 0)) {
4047 TCGv_i32 temp = tcg_temp_new_i32();
4049 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4050 tcg_gen_shri_i32(temp, temp, crn * 4);
4051 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4052 tcg_temp_free_i32(temp);
4055 TCGv_i32 temp = tcg_temp_new_i32();
4056 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4057 for (crn = 0 ; crn < 8 ; crn++) {
4058 if (crm & (1 << crn)) {
4059 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4060 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4063 tcg_temp_free_i32(temp);
4068 #if defined(TARGET_PPC64)
4069 static void gen_mtmsrd(DisasContext *ctx)
4071 #if defined(CONFIG_USER_ONLY)
4072 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4074 if (unlikely(!ctx->mem_idx)) {
4075 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4078 if (ctx->opcode & 0x00010000) {
4079 /* Special form that does not need any synchronisation */
4080 TCGv t0 = tcg_temp_new();
4081 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4082 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4083 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4086 /* XXX: we need to update nip before the store;
4087 * if we enter power saving mode, we will exit the loop
4088 * directly from ppc_store_msr
4090 gen_update_nip(ctx, ctx->nip);
4091 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
4092 /* Must stop the translation as the machine state may have changed */
4093 /* Note that mtmsr is not always defined as context-synchronizing */
4094 gen_stop_exception(ctx);
4100 static void gen_mtmsr(DisasContext *ctx)
4102 #if defined(CONFIG_USER_ONLY)
4103 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4105 if (unlikely(!ctx->mem_idx)) {
4106 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4109 if (ctx->opcode & 0x00010000) {
4110 /* Special form that does not need any synchronisation */
4111 TCGv t0 = tcg_temp_new();
4112 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4113 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4114 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4117 TCGv msr = tcg_temp_new();
4119 /* XXX: we need to update nip before the store;
4120 * if we enter power saving mode, we will exit the loop
4121 * directly from ppc_store_msr
4123 gen_update_nip(ctx, ctx->nip);
4124 #if defined(TARGET_PPC64)
4125 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
4127 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
4129 gen_helper_store_msr(cpu_env, msr);
4130 /* Must stop the translation as the machine state may have changed */
4131 /* Note that mtmsr is not always defined as context-synchronizing */
4132 gen_stop_exception(ctx);
4138 static void gen_mtspr(DisasContext *ctx)
4140 void (*write_cb)(void *opaque, int sprn, int gprn);
4141 uint32_t sprn = SPR(ctx->opcode);
4143 #if !defined(CONFIG_USER_ONLY)
4144 if (ctx->mem_idx == 2)
4145 write_cb = ctx->spr_cb[sprn].hea_write;
4146 else if (ctx->mem_idx)
4147 write_cb = ctx->spr_cb[sprn].oea_write;
4150 write_cb = ctx->spr_cb[sprn].uea_write;
4151 if (likely(write_cb != NULL)) {
4152 if (likely(write_cb != SPR_NOACCESS)) {
4153 (*write_cb)(ctx, sprn, rS(ctx->opcode));
4155 /* Privilege exception */
4156 qemu_log("Trying to write privileged spr %d %03x at "
4157 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
4158 printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
4159 "\n", sprn, sprn, ctx->nip);
4160 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4164 qemu_log("Trying to write invalid spr %d %03x at "
4165 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
4166 printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
4167 sprn, sprn, ctx->nip);
4168 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4172 /*** Cache management ***/
4175 static void gen_dcbf(DisasContext *ctx)
4177 /* XXX: specification says this is treated as a load by the MMU */
4179 gen_set_access_type(ctx, ACCESS_CACHE);
4180 t0 = tcg_temp_new();
4181 gen_addr_reg_index(ctx, t0);
4182 gen_qemu_ld8u(ctx, t0, t0);
4186 /* dcbi (Supervisor only) */
4187 static void gen_dcbi(DisasContext *ctx)
4189 #if defined(CONFIG_USER_ONLY)
4190 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4193 if (unlikely(!ctx->mem_idx)) {
4194 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4197 EA = tcg_temp_new();
4198 gen_set_access_type(ctx, ACCESS_CACHE);
4199 gen_addr_reg_index(ctx, EA);
4200 val = tcg_temp_new();
4201 /* XXX: specification says this should be treated as a store by the MMU */
4202 gen_qemu_ld8u(ctx, val, EA);
4203 gen_qemu_st8(ctx, val, EA);
4210 static void gen_dcbst(DisasContext *ctx)
4212 /* XXX: specification says this is treated as a load by the MMU */
4214 gen_set_access_type(ctx, ACCESS_CACHE);
4215 t0 = tcg_temp_new();
4216 gen_addr_reg_index(ctx, t0);
4217 gen_qemu_ld8u(ctx, t0, t0);
4222 static void gen_dcbt(DisasContext *ctx)
4224 /* interpreted as no-op */
4225 /* XXX: specification says this is treated as a load by the MMU
4226 * but does not generate any exception
4231 static void gen_dcbtst(DisasContext *ctx)
4233 /* interpreted as no-op */
4234 /* XXX: specification says this is treated as a load by the MMU
4235 * but does not generate any exception
4240 static void gen_dcbz(DisasContext *ctx)
4243 TCGv_i32 tcgv_is_dcbzl;
4244 int is_dcbzl = ctx->opcode & 0x00200000 ? 1 : 0;
4246 gen_set_access_type(ctx, ACCESS_CACHE);
4247 /* NIP cannot be restored if the memory exception comes from a helper */
4248 gen_update_nip(ctx, ctx->nip - 4);
4249 tcgv_addr = tcg_temp_new();
4250 tcgv_is_dcbzl = tcg_const_i32(is_dcbzl);
4252 gen_addr_reg_index(ctx, tcgv_addr);
4253 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_is_dcbzl);
4255 tcg_temp_free(tcgv_addr);
4256 tcg_temp_free_i32(tcgv_is_dcbzl);
4260 static void gen_dst(DisasContext *ctx)
4262 if (rA(ctx->opcode) == 0) {
4263 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4265 /* interpreted as no-op */
4270 static void gen_dstst(DisasContext *ctx)
4272 if (rA(ctx->opcode) == 0) {
4273 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4275 /* interpreted as no-op */
4281 static void gen_dss(DisasContext *ctx)
4283 /* interpreted as no-op */
4287 static void gen_icbi(DisasContext *ctx)
4290 gen_set_access_type(ctx, ACCESS_CACHE);
4291 /* NIP cannot be restored if the memory exception comes from a helper */
4292 gen_update_nip(ctx, ctx->nip - 4);
4293 t0 = tcg_temp_new();
4294 gen_addr_reg_index(ctx, t0);
4295 gen_helper_icbi(cpu_env, t0);
4301 static void gen_dcba(DisasContext *ctx)
4303 /* interpreted as no-op */
4304 /* XXX: specification says this is treated as a store by the MMU
4305 * but does not generate any exception
4309 /*** Segment register manipulation ***/
4310 /* Supervisor only: */
4313 static void gen_mfsr(DisasContext *ctx)
4315 #if defined(CONFIG_USER_ONLY)
4316 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4319 if (unlikely(!ctx->mem_idx)) {
4320 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4323 t0 = tcg_const_tl(SR(ctx->opcode));
4324 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4330 static void gen_mfsrin(DisasContext *ctx)
4332 #if defined(CONFIG_USER_ONLY)
4333 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4336 if (unlikely(!ctx->mem_idx)) {
4337 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4340 t0 = tcg_temp_new();
4341 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4342 tcg_gen_andi_tl(t0, t0, 0xF);
4343 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4349 static void gen_mtsr(DisasContext *ctx)
4351 #if defined(CONFIG_USER_ONLY)
4352 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4355 if (unlikely(!ctx->mem_idx)) {
4356 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4359 t0 = tcg_const_tl(SR(ctx->opcode));
4360 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4366 static void gen_mtsrin(DisasContext *ctx)
4368 #if defined(CONFIG_USER_ONLY)
4369 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4372 if (unlikely(!ctx->mem_idx)) {
4373 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4376 t0 = tcg_temp_new();
4377 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4378 tcg_gen_andi_tl(t0, t0, 0xF);
4379 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
4384 #if defined(TARGET_PPC64)
4385 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4388 static void gen_mfsr_64b(DisasContext *ctx)
4390 #if defined(CONFIG_USER_ONLY)
4391 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4394 if (unlikely(!ctx->mem_idx)) {
4395 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4398 t0 = tcg_const_tl(SR(ctx->opcode));
4399 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4405 static void gen_mfsrin_64b(DisasContext *ctx)
4407 #if defined(CONFIG_USER_ONLY)
4408 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4411 if (unlikely(!ctx->mem_idx)) {
4412 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4415 t0 = tcg_temp_new();
4416 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4417 tcg_gen_andi_tl(t0, t0, 0xF);
4418 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4424 static void gen_mtsr_64b(DisasContext *ctx)
4426 #if defined(CONFIG_USER_ONLY)
4427 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4430 if (unlikely(!ctx->mem_idx)) {
4431 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4434 t0 = tcg_const_tl(SR(ctx->opcode));
4435 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4441 static void gen_mtsrin_64b(DisasContext *ctx)
4443 #if defined(CONFIG_USER_ONLY)
4444 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4447 if (unlikely(!ctx->mem_idx)) {
4448 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4451 t0 = tcg_temp_new();
4452 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4453 tcg_gen_andi_tl(t0, t0, 0xF);
4454 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4460 static void gen_slbmte(DisasContext *ctx)
4462 #if defined(CONFIG_USER_ONLY)
4463 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4465 if (unlikely(!ctx->mem_idx)) {
4466 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4469 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
4470 cpu_gpr[rS(ctx->opcode)]);
4474 static void gen_slbmfee(DisasContext *ctx)
4476 #if defined(CONFIG_USER_ONLY)
4477 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4479 if (unlikely(!ctx->mem_idx)) {
4480 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4483 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4484 cpu_gpr[rB(ctx->opcode)]);
4488 static void gen_slbmfev(DisasContext *ctx)
4490 #if defined(CONFIG_USER_ONLY)
4491 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4493 if (unlikely(!ctx->mem_idx)) {
4494 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4497 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4498 cpu_gpr[rB(ctx->opcode)]);
4501 #endif /* defined(TARGET_PPC64) */
4503 /*** Lookaside buffer management ***/
4504 /* Optional & mem_idx only: */
4507 static void gen_tlbia(DisasContext *ctx)
4509 #if defined(CONFIG_USER_ONLY)
4510 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4512 if (unlikely(!ctx->mem_idx)) {
4513 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4516 gen_helper_tlbia(cpu_env);
4521 static void gen_tlbiel(DisasContext *ctx)
4523 #if defined(CONFIG_USER_ONLY)
4524 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4526 if (unlikely(!ctx->mem_idx)) {
4527 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4530 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4535 static void gen_tlbie(DisasContext *ctx)
4537 #if defined(CONFIG_USER_ONLY)
4538 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4540 if (unlikely(!ctx->mem_idx)) {
4541 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4544 if (NARROW_MODE(ctx)) {
4545 TCGv t0 = tcg_temp_new();
4546 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4547 gen_helper_tlbie(cpu_env, t0);
4550 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4556 static void gen_tlbsync(DisasContext *ctx)
4558 #if defined(CONFIG_USER_ONLY)
4559 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4561 if (unlikely(!ctx->mem_idx)) {
4562 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4565 /* tlbsync itself has no effect in this implementation: its only purpose
4566  * is to ensure that all previously issued tlbie operations have completed
4568 gen_stop_exception(ctx);
4572 #if defined(TARGET_PPC64)
4574 static void gen_slbia(DisasContext *ctx)
4576 #if defined(CONFIG_USER_ONLY)
4577 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4579 if (unlikely(!ctx->mem_idx)) {
4580 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4583 gen_helper_slbia(cpu_env);
4588 static void gen_slbie(DisasContext *ctx)
4590 #if defined(CONFIG_USER_ONLY)
4591 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4593 if (unlikely(!ctx->mem_idx)) {
4594 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4597 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4602 /*** External control ***/
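/* eciwx and ecowx (External Control In/Out Word Indexed) are modelled as
 * plain aligned 32-bit accesses with access type ACCESS_EXT; as the comments
 * below note, the EAR[E] enable bit is not checked yet.
 */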
4606 static void gen_eciwx(DisasContext *ctx)
4609 /* Should check EAR[E]! */
4610 gen_set_access_type(ctx, ACCESS_EXT);
4611 t0 = tcg_temp_new();
4612 gen_addr_reg_index(ctx, t0);
4613 gen_check_align(ctx, t0, 0x03);
4614 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4619 static void gen_ecowx(DisasContext *ctx)
4622 /* Should check EAR[E]! */
4623 gen_set_access_type(ctx, ACCESS_EXT);
4624 t0 = tcg_temp_new();
4625 gen_addr_reg_index(ctx, t0);
4626 gen_check_align(ctx, t0, 0x03);
4627 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4631 /* PowerPC 601 specific instructions */
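/* The 601 kept most of the original POWER integer instructions.  They are
 * 32-bit only, and several of them involve the MQ register, which is
 * emulated as an SPR accessed through gen_load_spr()/gen_store_spr().
 */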
4634 static void gen_abs(DisasContext *ctx)
4636 int l1 = gen_new_label();
4637 int l2 = gen_new_label();
4638 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4639 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4642 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4644 if (unlikely(Rc(ctx->opcode) != 0))
4645 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4649 static void gen_abso(DisasContext *ctx)
4651 int l1 = gen_new_label();
4652 int l2 = gen_new_label();
4653 int l3 = gen_new_label();
4654 /* Start with XER OV disabled, the most likely case */
4655 tcg_gen_movi_tl(cpu_ov, 0);
4656 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4657 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4658 tcg_gen_movi_tl(cpu_ov, 1);
4659 tcg_gen_movi_tl(cpu_so, 1);
4662 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4665 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4667 if (unlikely(Rc(ctx->opcode) != 0))
4668 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4672 static void gen_clcs(DisasContext *ctx)
4674 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4675 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4676 tcg_temp_free_i32(t0);
4677 /* Rc=1 sets CR0 to an undefined state */
4681 static void gen_div(DisasContext *ctx)
4683 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4684 cpu_gpr[rB(ctx->opcode)]);
4685 if (unlikely(Rc(ctx->opcode) != 0))
4686 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4690 static void gen_divo(DisasContext *ctx)
4692 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4693 cpu_gpr[rB(ctx->opcode)]);
4694 if (unlikely(Rc(ctx->opcode) != 0))
4695 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4699 static void gen_divs(DisasContext *ctx)
4701 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4702 cpu_gpr[rB(ctx->opcode)]);
4703 if (unlikely(Rc(ctx->opcode) != 0))
4704 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4707 /* divso - divso. */
4708 static void gen_divso(DisasContext *ctx)
4710 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
4711 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4712 if (unlikely(Rc(ctx->opcode) != 0))
4713 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4717 static void gen_doz(DisasContext *ctx)
4719 int l1 = gen_new_label();
4720 int l2 = gen_new_label();
4721 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4722 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4725 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4727 if (unlikely(Rc(ctx->opcode) != 0))
4728 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4732 static void gen_dozo(DisasContext *ctx)
4734 int l1 = gen_new_label();
4735 int l2 = gen_new_label();
4736 TCGv t0 = tcg_temp_new();
4737 TCGv t1 = tcg_temp_new();
4738 TCGv t2 = tcg_temp_new();
4739 /* Start with XER OV disabled, the most likely case */
4740 tcg_gen_movi_tl(cpu_ov, 0);
4741 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4742 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4743 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4744 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4745 tcg_gen_andc_tl(t1, t1, t2);
4746 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4747 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4748 tcg_gen_movi_tl(cpu_ov, 1);
4749 tcg_gen_movi_tl(cpu_so, 1);
4752 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4757 if (unlikely(Rc(ctx->opcode) != 0))
4758 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4762 static void gen_dozi(DisasContext *ctx)
4764 target_long simm = SIMM(ctx->opcode);
4765 int l1 = gen_new_label();
4766 int l2 = gen_new_label();
4767 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4768 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4771 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4773 if (unlikely(Rc(ctx->opcode) != 0))
4774 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4777 /* lscbx - lscbx. */
4778 static void gen_lscbx(DisasContext *ctx)
4780 TCGv t0 = tcg_temp_new();
4781 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4782 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4783 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4785 gen_addr_reg_index(ctx, t0);
4786 /* NIP cannot be restored if the memory exception comes from a helper */
4787 gen_update_nip(ctx, ctx->nip - 4);
4788 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
4789 tcg_temp_free_i32(t1);
4790 tcg_temp_free_i32(t2);
4791 tcg_temp_free_i32(t3);
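/* The helper returns the number of bytes transferred; it is written into the
 * low 7 bits of XER (the string byte count) and also drives the optional Rc
 * update below. */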
4792 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4793 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4794 if (unlikely(Rc(ctx->opcode) != 0))
4795 gen_set_Rc0(ctx, t0);
4799 /* maskg - maskg. */
4800 static void gen_maskg(DisasContext *ctx)
4802 int l1 = gen_new_label();
4803 TCGv t0 = tcg_temp_new();
4804 TCGv t1 = tcg_temp_new();
4805 TCGv t2 = tcg_temp_new();
4806 TCGv t3 = tcg_temp_new();
4807 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4808 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4809 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4810 tcg_gen_addi_tl(t2, t0, 1);
4811 tcg_gen_shr_tl(t2, t3, t2);
4812 tcg_gen_shr_tl(t3, t3, t1);
4813 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4814 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4815 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4821 if (unlikely(Rc(ctx->opcode) != 0))
4822 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4825 /* maskir - maskir. */
4826 static void gen_maskir(DisasContext *ctx)
4828 TCGv t0 = tcg_temp_new();
4829 TCGv t1 = tcg_temp_new();
4830 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4831 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4832 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4835 if (unlikely(Rc(ctx->opcode) != 0))
4836 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4840 static void gen_mul(DisasContext *ctx)
4842 TCGv_i64 t0 = tcg_temp_new_i64();
4843 TCGv_i64 t1 = tcg_temp_new_i64();
4844 TCGv t2 = tcg_temp_new();
4845 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4846 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4847 tcg_gen_mul_i64(t0, t0, t1);
4848 tcg_gen_trunc_i64_tl(t2, t0);
4849 gen_store_spr(SPR_MQ, t2);
4850 tcg_gen_shri_i64(t1, t0, 32);
4851 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4852 tcg_temp_free_i64(t0);
4853 tcg_temp_free_i64(t1);
4855 if (unlikely(Rc(ctx->opcode) != 0))
4856 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4860 static void gen_mulo(DisasContext *ctx)
4862 int l1 = gen_new_label();
4863 TCGv_i64 t0 = tcg_temp_new_i64();
4864 TCGv_i64 t1 = tcg_temp_new_i64();
4865 TCGv t2 = tcg_temp_new();
4866 /* Start with XER OV disabled, the most likely case */
4867 tcg_gen_movi_tl(cpu_ov, 0);
4868 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4869 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4870 tcg_gen_mul_i64(t0, t0, t1);
4871 tcg_gen_trunc_i64_tl(t2, t0);
4872 gen_store_spr(SPR_MQ, t2);
4873 tcg_gen_shri_i64(t1, t0, 32);
4874 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4875 tcg_gen_ext32s_i64(t1, t0);
4876 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4877 tcg_gen_movi_tl(cpu_ov, 1);
4878 tcg_gen_movi_tl(cpu_so, 1);
4880 tcg_temp_free_i64(t0);
4881 tcg_temp_free_i64(t1);
4883 if (unlikely(Rc(ctx->opcode) != 0))
4884 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4888 static void gen_nabs(DisasContext *ctx)
4890 int l1 = gen_new_label();
4891 int l2 = gen_new_label();
4892 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4893 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4896 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4898 if (unlikely(Rc(ctx->opcode) != 0))
4899 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4902 /* nabso - nabso. */
4903 static void gen_nabso(DisasContext *ctx)
4905 int l1 = gen_new_label();
4906 int l2 = gen_new_label();
4907 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4908 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4911 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4913 /* nabs never overflows */
4914 tcg_gen_movi_tl(cpu_ov, 0);
4915 if (unlikely(Rc(ctx->opcode) != 0))
4916 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4920 static void gen_rlmi(DisasContext *ctx)
4922 uint32_t mb = MB(ctx->opcode);
4923 uint32_t me = ME(ctx->opcode);
4924 TCGv t0 = tcg_temp_new();
4925 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4926 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4927 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4928 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4929 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4931 if (unlikely(Rc(ctx->opcode) != 0))
4932 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4936 static void gen_rrib(DisasContext *ctx)
4938 TCGv t0 = tcg_temp_new();
4939 TCGv t1 = tcg_temp_new();
4940 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4941 tcg_gen_movi_tl(t1, 0x80000000);
4942 tcg_gen_shr_tl(t1, t1, t0);
4943 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4944 tcg_gen_and_tl(t0, t0, t1);
4945 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4946 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4949 if (unlikely(Rc(ctx->opcode) != 0))
4950 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4954 static void gen_sle(DisasContext *ctx)
4956 TCGv t0 = tcg_temp_new();
4957 TCGv t1 = tcg_temp_new();
4958 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4959 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4960 tcg_gen_subfi_tl(t1, 32, t1);
4961 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4962 tcg_gen_or_tl(t1, t0, t1);
4963 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4964 gen_store_spr(SPR_MQ, t1);
4967 if (unlikely(Rc(ctx->opcode) != 0))
4968 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4972 static void gen_sleq(DisasContext *ctx)
4974 TCGv t0 = tcg_temp_new();
4975 TCGv t1 = tcg_temp_new();
4976 TCGv t2 = tcg_temp_new();
4977 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4978 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4979 tcg_gen_shl_tl(t2, t2, t0);
4980 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4981 gen_load_spr(t1, SPR_MQ);
4982 gen_store_spr(SPR_MQ, t0);
4983 tcg_gen_and_tl(t0, t0, t2);
4984 tcg_gen_andc_tl(t1, t1, t2);
4985 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4989 if (unlikely(Rc(ctx->opcode) != 0))
4990 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4994 static void gen_sliq(DisasContext *ctx)
4996 int sh = SH(ctx->opcode);
4997 TCGv t0 = tcg_temp_new();
4998 TCGv t1 = tcg_temp_new();
4999 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5000 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5001 tcg_gen_or_tl(t1, t0, t1);
5002 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5003 gen_store_spr(SPR_MQ, t1);
5006 if (unlikely(Rc(ctx->opcode) != 0))
5007 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5010 /* slliq - slliq. */
5011 static void gen_slliq(DisasContext *ctx)
5013 int sh = SH(ctx->opcode);
5014 TCGv t0 = tcg_temp_new();
5015 TCGv t1 = tcg_temp_new();
5016 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5017 gen_load_spr(t1, SPR_MQ);
5018 gen_store_spr(SPR_MQ, t0);
5019 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
5020 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
5021 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5024 if (unlikely(Rc(ctx->opcode) != 0))
5025 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5029 static void gen_sllq(DisasContext *ctx)
5031 int l1 = gen_new_label();
5032 int l2 = gen_new_label();
5033 TCGv t0 = tcg_temp_local_new();
5034 TCGv t1 = tcg_temp_local_new();
5035 TCGv t2 = tcg_temp_local_new();
5036 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5037 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5038 tcg_gen_shl_tl(t1, t1, t2);
5039 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5040 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5041 gen_load_spr(t0, SPR_MQ);
5042 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5045 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5046 gen_load_spr(t2, SPR_MQ);
5047 tcg_gen_andc_tl(t1, t2, t1);
5048 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5053 if (unlikely(Rc(ctx->opcode) != 0))
5054 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5058 static void gen_slq(DisasContext *ctx)
5060 int l1 = gen_new_label();
5061 TCGv t0 = tcg_temp_new();
5062 TCGv t1 = tcg_temp_new();
5063 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5064 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5065 tcg_gen_subfi_tl(t1, 32, t1);
5066 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5067 tcg_gen_or_tl(t1, t0, t1);
5068 gen_store_spr(SPR_MQ, t1);
5069 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5070 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5071 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5072 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5076 if (unlikely(Rc(ctx->opcode) != 0))
5077 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5080 /* sraiq - sraiq. */
5081 static void gen_sraiq(DisasContext *ctx)
5083 int sh = SH(ctx->opcode);
5084 int l1 = gen_new_label();
5085 TCGv t0 = tcg_temp_new();
5086 TCGv t1 = tcg_temp_new();
5087 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5088 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5089 tcg_gen_or_tl(t0, t0, t1);
5090 gen_store_spr(SPR_MQ, t0);
5091 tcg_gen_movi_tl(cpu_ca, 0);
5092 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5093 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
5094 tcg_gen_movi_tl(cpu_ca, 1);
5096 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
5099 if (unlikely(Rc(ctx->opcode) != 0))
5100 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5104 static void gen_sraq(DisasContext *ctx)
5106 int l1 = gen_new_label();
5107 int l2 = gen_new_label();
5108 TCGv t0 = tcg_temp_new();
5109 TCGv t1 = tcg_temp_local_new();
5110 TCGv t2 = tcg_temp_local_new();
5111 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5112 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5113 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
5114 tcg_gen_subfi_tl(t2, 32, t2);
5115 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
5116 tcg_gen_or_tl(t0, t0, t2);
5117 gen_store_spr(SPR_MQ, t0);
5118 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5119 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5120 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
5121 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
5124 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
5125 tcg_gen_movi_tl(cpu_ca, 0);
5126 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
5127 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
5128 tcg_gen_movi_tl(cpu_ca, 1);
5132 if (unlikely(Rc(ctx->opcode) != 0))
5133 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5137 static void gen_sre(DisasContext *ctx)
5139 TCGv t0 = tcg_temp_new();
5140 TCGv t1 = tcg_temp_new();
5141 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5142 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5143 tcg_gen_subfi_tl(t1, 32, t1);
5144 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5145 tcg_gen_or_tl(t1, t0, t1);
5146 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5147 gen_store_spr(SPR_MQ, t1);
5150 if (unlikely(Rc(ctx->opcode) != 0))
5151 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5155 static void gen_srea(DisasContext *ctx)
5157 TCGv t0 = tcg_temp_new();
5158 TCGv t1 = tcg_temp_new();
5159 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5160 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5161 gen_store_spr(SPR_MQ, t0);
5162 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
5165 if (unlikely(Rc(ctx->opcode) != 0))
5166 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5170 static void gen_sreq(DisasContext *ctx)
5172 TCGv t0 = tcg_temp_new();
5173 TCGv t1 = tcg_temp_new();
5174 TCGv t2 = tcg_temp_new();
5175 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5176 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5177 tcg_gen_shr_tl(t1, t1, t0);
5178 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5179 gen_load_spr(t2, SPR_MQ);
5180 gen_store_spr(SPR_MQ, t0);
5181 tcg_gen_and_tl(t0, t0, t1);
5182 tcg_gen_andc_tl(t2, t2, t1);
5183 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5187 if (unlikely(Rc(ctx->opcode) != 0))
5188 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5192 static void gen_sriq(DisasContext *ctx)
5194 int sh = SH(ctx->opcode);
5195 TCGv t0 = tcg_temp_new();
5196 TCGv t1 = tcg_temp_new();
5197 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5198 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5199 tcg_gen_or_tl(t1, t0, t1);
5200 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5201 gen_store_spr(SPR_MQ, t1);
5204 if (unlikely(Rc(ctx->opcode) != 0))
5205 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5209 static void gen_srliq(DisasContext *ctx)
5211 int sh = SH(ctx->opcode);
5212 TCGv t0 = tcg_temp_new();
5213 TCGv t1 = tcg_temp_new();
5214 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5215 gen_load_spr(t1, SPR_MQ);
5216 gen_store_spr(SPR_MQ, t0);
5217 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5218 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5219 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5222 if (unlikely(Rc(ctx->opcode) != 0))
5223 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5227 static void gen_srlq(DisasContext *ctx)
5229 int l1 = gen_new_label();
5230 int l2 = gen_new_label();
5231 TCGv t0 = tcg_temp_local_new();
5232 TCGv t1 = tcg_temp_local_new();
5233 TCGv t2 = tcg_temp_local_new();
5234 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5235 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5236 tcg_gen_shr_tl(t2, t1, t2);
5237 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5238 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5239 gen_load_spr(t0, SPR_MQ);
5240 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5243 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5244 tcg_gen_and_tl(t0, t0, t2);
5245 gen_load_spr(t1, SPR_MQ);
5246 tcg_gen_andc_tl(t1, t1, t2);
5247 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5252 if (unlikely(Rc(ctx->opcode) != 0))
5253 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5257 static void gen_srq(DisasContext *ctx)
5259 int l1 = gen_new_label();
5260 TCGv t0 = tcg_temp_new();
5261 TCGv t1 = tcg_temp_new();
5262 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5263 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5264 tcg_gen_subfi_tl(t1, 32, t1);
5265 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5266 tcg_gen_or_tl(t1, t0, t1);
5267 gen_store_spr(SPR_MQ, t1);
5268 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5269 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5270 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5271 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5275 if (unlikely(Rc(ctx->opcode) != 0))
5276 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5279 /* PowerPC 602 specific instructions */
5282 static void gen_dsa(DisasContext *ctx)
5285 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5289 static void gen_esa(DisasContext *ctx)
5292 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5296 static void gen_mfrom(DisasContext *ctx)
5298 #if defined(CONFIG_USER_ONLY)
5299 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5301 if (unlikely(!ctx->mem_idx)) {
5302 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5305 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5309 /* 602 - 603 - G2 TLB management */
5312 static void gen_tlbld_6xx(DisasContext *ctx)
5314 #if defined(CONFIG_USER_ONLY)
5315 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5317 if (unlikely(!ctx->mem_idx)) {
5318 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5321 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5326 static void gen_tlbli_6xx(DisasContext *ctx)
5328 #if defined(CONFIG_USER_ONLY)
5329 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5331 if (unlikely(!ctx->mem_idx)) {
5332 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5335 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5339 /* 74xx TLB management */
5342 static void gen_tlbld_74xx(DisasContext *ctx)
5344 #if defined(CONFIG_USER_ONLY)
5345 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5347 if (unlikely(!ctx->mem_idx)) {
5348 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5351 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5356 static void gen_tlbli_74xx(DisasContext *ctx)
5358 #if defined(CONFIG_USER_ONLY)
5359 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5361 if (unlikely(!ctx->mem_idx)) {
5362 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5365 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5369 /* POWER instructions not in PowerPC 601 */
5372 static void gen_clf(DisasContext *ctx)
5374 /* Cache line flush: implemented as no-op */
5378 static void gen_cli(DisasContext *ctx)
5380 /* Cache line invalidate: privileged and treated as no-op */
5381 #if defined(CONFIG_USER_ONLY)
5382 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5384 if (unlikely(!ctx->mem_idx)) {
5385 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5392 static void gen_dclst(DisasContext *ctx)
5394 /* Data cache line store: treated as no-op */
5397 static void gen_mfsri(DisasContext *ctx)
5399 #if defined(CONFIG_USER_ONLY)
5400 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5402 int ra = rA(ctx->opcode);
5403 int rd = rD(ctx->opcode);
5405 if (unlikely(!ctx->mem_idx)) {
5406 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5409 t0 = tcg_temp_new();
5410 gen_addr_reg_index(ctx, t0);
5411 tcg_gen_shri_tl(t0, t0, 28);
5412 tcg_gen_andi_tl(t0, t0, 0xF);
5413 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
5415 if (ra != 0 && ra != rd)
5416 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5420 static void gen_rac(DisasContext *ctx)
5422 #if defined(CONFIG_USER_ONLY)
5423 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5426 if (unlikely(!ctx->mem_idx)) {
5427 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5430 t0 = tcg_temp_new();
5431 gen_addr_reg_index(ctx, t0);
5432 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5437 static void gen_rfsvc(DisasContext *ctx)
5439 #if defined(CONFIG_USER_ONLY)
5440 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5442 if (unlikely(!ctx->mem_idx)) {
5443 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5446 gen_helper_rfsvc(cpu_env);
5447 gen_sync_exception(ctx);
5451 /* svc is not implemented for now */
5453 /* POWER2 specific instructions */
5454 /* Quad manipulation (load/store two floats at a time) */
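/* The lfq/stfq family moves a pair of FPRs: FPR[rD] from/to EA and
 * FPR[(rD + 1) % 32] from/to EA + 8.  The "u" forms additionally write the
 * first effective address back into rA, and the "x" forms use
 * register-indexed addressing instead of an immediate displacement.
 */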
5457 static void gen_lfq(DisasContext *ctx)
5459 int rd = rD(ctx->opcode);
5461 gen_set_access_type(ctx, ACCESS_FLOAT);
5462 t0 = tcg_temp_new();
5463 gen_addr_imm_index(ctx, t0, 0);
5464 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5465 gen_addr_add(ctx, t0, t0, 8);
5466 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5471 static void gen_lfqu(DisasContext *ctx)
5473 int ra = rA(ctx->opcode);
5474 int rd = rD(ctx->opcode);
5476 gen_set_access_type(ctx, ACCESS_FLOAT);
5477 t0 = tcg_temp_new();
5478 t1 = tcg_temp_new();
5479 gen_addr_imm_index(ctx, t0, 0);
5480 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5481 gen_addr_add(ctx, t1, t0, 8);
5482 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5484 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5490 static void gen_lfqux(DisasContext *ctx)
5492 int ra = rA(ctx->opcode);
5493 int rd = rD(ctx->opcode);
5494 gen_set_access_type(ctx, ACCESS_FLOAT);
5496 t0 = tcg_temp_new();
5497 gen_addr_reg_index(ctx, t0);
5498 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5499 t1 = tcg_temp_new();
5500 gen_addr_add(ctx, t1, t0, 8);
5501 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5504 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5509 static void gen_lfqx(DisasContext *ctx)
5511 int rd = rD(ctx->opcode);
5513 gen_set_access_type(ctx, ACCESS_FLOAT);
5514 t0 = tcg_temp_new();
5515 gen_addr_reg_index(ctx, t0);
5516 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5517 gen_addr_add(ctx, t0, t0, 8);
5518 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5523 static void gen_stfq(DisasContext *ctx)
5525 int rd = rD(ctx->opcode);
5527 gen_set_access_type(ctx, ACCESS_FLOAT);
5528 t0 = tcg_temp_new();
5529 gen_addr_imm_index(ctx, t0, 0);
5530 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5531 gen_addr_add(ctx, t0, t0, 8);
5532 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5537 static void gen_stfqu(DisasContext *ctx)
5539 int ra = rA(ctx->opcode);
5540 int rd = rD(ctx->opcode);
5542 gen_set_access_type(ctx, ACCESS_FLOAT);
5543 t0 = tcg_temp_new();
5544 gen_addr_imm_index(ctx, t0, 0);
5545 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5546 t1 = tcg_temp_new();
5547 gen_addr_add(ctx, t1, t0, 8);
5548 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5551 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5556 static void gen_stfqux(DisasContext *ctx)
5558 int ra = rA(ctx->opcode);
5559 int rd = rD(ctx->opcode);
5561 gen_set_access_type(ctx, ACCESS_FLOAT);
5562 t0 = tcg_temp_new();
5563 gen_addr_reg_index(ctx, t0);
5564 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5565 t1 = tcg_temp_new();
5566 gen_addr_add(ctx, t1, t0, 8);
5567 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5570 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5575 static void gen_stfqx(DisasContext *ctx)
5577 int rd = rD(ctx->opcode);
5579 gen_set_access_type(ctx, ACCESS_FLOAT);
5580 t0 = tcg_temp_new();
5581 gen_addr_reg_index(ctx, t0);
5582 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5583 gen_addr_add(ctx, t0, t0, 8);
5584 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5588 /* BookE specific instructions */
5590 /* XXX: not implemented on 440? */
5591 static void gen_mfapidi(DisasContext *ctx)
5594 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5597 /* XXX: not implemented on 440? */
5598 static void gen_tlbiva(DisasContext *ctx)
5600 #if defined(CONFIG_USER_ONLY)
5601 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5604 if (unlikely(!ctx->mem_idx)) {
5605 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5608 t0 = tcg_temp_new();
5609 gen_addr_reg_index(ctx, t0);
5610 gen_helper_tlbie(cpu_env, t0);
5615 /* All 405 MAC instructions are translated here */
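/* The opc2/opc3 fields encode the whole 405 MAC family (see the
 * GEN_MAC_HANDLER table below): opc2 selects multiply-accumulate (0x0C),
 * negative multiply-accumulate (0x0E) or multiply-only (0x08).  In opc3,
 * bit 0x10 requests an XER[OV] update, bit 0x02 requests saturation,
 * bit 0x01 selects signed operands, and the remaining bits choose which
 * 16-bit halves of rA and rB are multiplied.  For example, macchws is
 * roughly:
 *   rD = sat32(rD + (int32_t)(int16_t)rA * (int16_t)(rB >> 16));
 */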
5616 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5617 int ra, int rb, int rt, int Rc)
5621 t0 = tcg_temp_local_new();
5622 t1 = tcg_temp_local_new();
5624 switch (opc3 & 0x0D) {
5626 /* macchw - macchw. - macchwo - macchwo. */
5627 /* macchws - macchws. - macchwso - macchwso. */
5628 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5629 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5630 /* mulchw - mulchw. */
5631 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5632 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5633 tcg_gen_ext16s_tl(t1, t1);
5636 /* macchwu - macchwu. - macchwuo - macchwuo. */
5637 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5638 /* mulchwu - mulchwu. */
5639 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5640 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5641 tcg_gen_ext16u_tl(t1, t1);
5644 /* machhw - machhw. - machhwo - machhwo. */
5645 /* machhws - machhws. - machhwso - machhwso. */
5646 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5647 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5648 /* mulhhw - mulhhw. */
5649 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5650 tcg_gen_ext16s_tl(t0, t0);
5651 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5652 tcg_gen_ext16s_tl(t1, t1);
5655 /* machhwu - machhwu. - machhwuo - machhwuo. */
5656 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5657 /* mulhhwu - mulhhwu. */
5658 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5659 tcg_gen_ext16u_tl(t0, t0);
5660 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5661 tcg_gen_ext16u_tl(t1, t1);
5664 /* maclhw - maclhw. - maclhwo - maclhwo. */
5665 /* maclhws - maclhws. - maclhwso - maclhwso. */
5666 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5667 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5668 /* mullhw - mullhw. */
5669 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5670 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5673 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5674 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5675 /* mullhwu - mullhwu. */
5676 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5677 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5681 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5682 tcg_gen_mul_tl(t1, t0, t1);
5684 /* nmultiply-and-accumulate (0x0E) */
5685 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5687 /* multiply-and-accumulate (0x0C) */
5688 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5692 /* Check overflow and/or saturate */
5693 int l1 = gen_new_label();
5696 /* Start with XER OV disabled, the most likely case */
5697 tcg_gen_movi_tl(cpu_ov, 0);
5701 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5702 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5703 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5704 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5707 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5708 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5712 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5715 tcg_gen_movi_tl(t0, UINT32_MAX);
5719 /* Check overflow */
5720 tcg_gen_movi_tl(cpu_ov, 1);
5721 tcg_gen_movi_tl(cpu_so, 1);
5724 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5727 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5731 if (unlikely(Rc != 0)) {
5733 gen_set_Rc0(ctx, cpu_gpr[rt]);
5737 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5738 static void glue(gen_, name)(DisasContext *ctx) \
5740 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5741 rD(ctx->opcode), Rc(ctx->opcode)); \
5744 /* macchw - macchw. */
5745 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5746 /* macchwo - macchwo. */
5747 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5748 /* macchws - macchws. */
5749 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5750 /* macchwso - macchwso. */
5751 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5752 /* macchwsu - macchwsu. */
5753 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5754 /* macchwsuo - macchwsuo. */
5755 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5756 /* macchwu - macchwu. */
5757 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5758 /* macchwuo - macchwuo. */
5759 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5760 /* machhw - machhw. */
5761 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5762 /* machhwo - machhwo. */
5763 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5764 /* machhws - machhws. */
5765 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5766 /* machhwso - machhwso. */
5767 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5768 /* machhwsu - machhwsu. */
5769 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5770 /* machhwsuo - machhwsuo. */
5771 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5772 /* machhwu - machhwu. */
5773 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5774 /* machhwuo - machhwuo. */
5775 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5776 /* maclhw - maclhw. */
5777 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5778 /* maclhwo - maclhwo. */
5779 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5780 /* maclhws - maclhws. */
5781 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5782 /* maclhwso - maclhwso. */
5783 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5784 /* maclhwu - maclhwu. */
5785 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5786 /* maclhwuo - maclhwuo. */
5787 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5788 /* maclhwsu - maclhwsu. */
5789 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5790 /* maclhwsuo - maclhwsuo. */
5791 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5792 /* nmacchw - nmacchw. */
5793 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5794 /* nmacchwo - nmacchwo. */
5795 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5796 /* nmacchws - nmacchws. */
5797 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5798 /* nmacchwso - nmacchwso. */
5799 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5800 /* nmachhw - nmachhw. */
5801 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5802 /* nmachhwo - nmachhwo. */
5803 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5804 /* nmachhws - nmachhws. */
5805 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5806 /* nmachhwso - nmachhwso. */
5807 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5808 /* nmaclhw - nmaclhw. */
5809 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5810 /* nmaclhwo - nmaclhwo. */
5811 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5812 /* nmaclhws - nmaclhws. */
5813 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5814 /* nmaclhwso - nmaclhwso. */
5815 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5817 /* mulchw - mulchw. */
5818 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5819 /* mulchwu - mulchwu. */
5820 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5821 /* mulhhw - mulhhw. */
5822 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5823 /* mulhhwu - mulhhwu. */
5824 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5825 /* mullhw - mullhw. */
5826 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5827 /* mullhwu - mullhwu. */
5828 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
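/* Device control register (DCR) access.  mfdcr/mtdcr take the DCR number
 * from the SPR field of the opcode, while mfdcrx/mtdcrx take it from rA;
 * all of them go through helpers, so NIP is updated first in case the
 * helper raises an exception.
 */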
5831 static void gen_mfdcr(DisasContext *ctx)
5833 #if defined(CONFIG_USER_ONLY)
5834 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5837 if (unlikely(!ctx->mem_idx)) {
5838 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5841 /* NIP cannot be restored if the memory exception comes from a helper */
5842 gen_update_nip(ctx, ctx->nip - 4);
5843 dcrn = tcg_const_tl(SPR(ctx->opcode));
5844 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5845 tcg_temp_free(dcrn);
5850 static void gen_mtdcr(DisasContext *ctx)
5852 #if defined(CONFIG_USER_ONLY)
5853 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5856 if (unlikely(!ctx->mem_idx)) {
5857 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5860 /* NIP cannot be restored if the memory exception comes from a helper */
5861 gen_update_nip(ctx, ctx->nip - 4);
5862 dcrn = tcg_const_tl(SPR(ctx->opcode));
5863 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5864 tcg_temp_free(dcrn);
5869 /* XXX: not implemented on 440? */
5870 static void gen_mfdcrx(DisasContext *ctx)
5872 #if defined(CONFIG_USER_ONLY)
5873 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5875 if (unlikely(!ctx->mem_idx)) {
5876 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5879 /* NIP cannot be restored if the memory exception comes from a helper */
5880 gen_update_nip(ctx, ctx->nip - 4);
5881 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5882 cpu_gpr[rA(ctx->opcode)]);
5883 /* Note: if the Rc flag is set, CR0 is left in an undefined state */
5888 /* XXX: not implemented on 440? */
5889 static void gen_mtdcrx(DisasContext *ctx)
5891 #if defined(CONFIG_USER_ONLY)
5892 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5894 if (unlikely(!ctx->mem_idx)) {
5895 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5898 /* NIP cannot be restored if the memory exception comes from a helper */
5899 gen_update_nip(ctx, ctx->nip - 4);
5900 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5901 cpu_gpr[rS(ctx->opcode)]);
5902 /* Note: if the Rc flag is set, CR0 is left in an undefined state */
5906 /* mfdcrux (PPC 460): user-mode access to DCR */
5907 static void gen_mfdcrux(DisasContext *ctx)
5909 /* NIP cannot be restored if the memory exception comes from a helper */
5910 gen_update_nip(ctx, ctx->nip - 4);
5911 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5912 cpu_gpr[rA(ctx->opcode)]);
5913 /* Note: if the Rc flag is set, CR0 is left in an undefined state */
5916 /* mtdcrux (PPC 460): user-mode access to DCR */
5917 static void gen_mtdcrux(DisasContext *ctx)
5919 /* NIP cannot be restored if the memory exception comes from a helper */
5920 gen_update_nip(ctx, ctx->nip - 4);
5921 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5922 cpu_gpr[rS(ctx->opcode)]);
5923 /* Note: if the Rc flag is set, CR0 is left in an undefined state */
5927 static void gen_dccci(DisasContext *ctx)
5929 #if defined(CONFIG_USER_ONLY)
5930 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5932 if (unlikely(!ctx->mem_idx)) {
5933 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5936 /* interpreted as no-op */
5941 static void gen_dcread(DisasContext *ctx)
5943 #if defined(CONFIG_USER_ONLY)
5944 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5947 if (unlikely(!ctx->mem_idx)) {
5948 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5951 gen_set_access_type(ctx, ACCESS_CACHE);
5952 EA = tcg_temp_new();
5953 gen_addr_reg_index(ctx, EA);
5954 val = tcg_temp_new();
5955 gen_qemu_ld32u(ctx, val, EA);
5957 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5963 static void gen_icbt_40x(DisasContext *ctx)
5965 /* interpreted as no-op */
5966 /* XXX: the specification says this is treated as a load by the MMU,
5967  * but it does not generate any exception
5972 static void gen_iccci(DisasContext *ctx)
5974 #if defined(CONFIG_USER_ONLY)
5975 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5977 if (unlikely(!ctx->mem_idx)) {
5978 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5981 /* interpreted as no-op */
5986 static void gen_icread(DisasContext *ctx)
5988 #if defined(CONFIG_USER_ONLY)
5989 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5991 if (unlikely(!ctx->mem_idx)) {
5992 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5995 /* interpreted as no-op */
5999 /* rfci (mem_idx only) */
6000 static void gen_rfci_40x(DisasContext *ctx)
6002 #if defined(CONFIG_USER_ONLY)
6003 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6005 if (unlikely(!ctx->mem_idx)) {
6006 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6009 /* Restore CPU state */
6010 gen_helper_40x_rfci(cpu_env);
6011 gen_sync_exception(ctx);
6015 static void gen_rfci(DisasContext *ctx)
6017 #if defined(CONFIG_USER_ONLY)
6018 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6020 if (unlikely(!ctx->mem_idx)) {
6021 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6024 /* Restore CPU state */
6025 gen_helper_rfci(cpu_env);
6026 gen_sync_exception(ctx);
6030 /* BookE specific */
6032 /* XXX: not implemented on 440? */
6033 static void gen_rfdi(DisasContext *ctx)
6035 #if defined(CONFIG_USER_ONLY)
6036 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6038 if (unlikely(!ctx->mem_idx)) {
6039 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6042 /* Restore CPU state */
6043 gen_helper_rfdi(cpu_env);
6044 gen_sync_exception(ctx);
6048 /* XXX: not implemented on 440? */
6049 static void gen_rfmci(DisasContext *ctx)
6051 #if defined(CONFIG_USER_ONLY)
6052 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6054 if (unlikely(!ctx->mem_idx)) {
6055 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6058 /* Restore CPU state */
6059 gen_helper_rfmci(cpu_env);
6060 gen_sync_exception(ctx);
6064 /* TLB management - PowerPC 405 implementation */
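/* On the 40x, tlbre and tlbwe use the rB field of the opcode to select which
 * TLB entry word (the "hi" tag word or the "lo" data word) is read or
 * written; any other value is an invalid form.
 */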
6067 static void gen_tlbre_40x(DisasContext *ctx)
6069 #if defined(CONFIG_USER_ONLY)
6070 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6072 if (unlikely(!ctx->mem_idx)) {
6073 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6076 switch (rB(ctx->opcode)) {
6078 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
6079 cpu_gpr[rA(ctx->opcode)]);
6082 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
6083 cpu_gpr[rA(ctx->opcode)]);
6086 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6092 /* tlbsx - tlbsx. */
6093 static void gen_tlbsx_40x(DisasContext *ctx)
6095 #if defined(CONFIG_USER_ONLY)
6096 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6099 if (unlikely(!ctx->mem_idx)) {
6100 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6103 t0 = tcg_temp_new();
6104 gen_addr_reg_index(ctx, t0);
6105 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6107 if (Rc(ctx->opcode)) {
6108 int l1 = gen_new_label();
6109 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6110 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6111 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6118 static void gen_tlbwe_40x(DisasContext *ctx)
6120 #if defined(CONFIG_USER_ONLY)
6121 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6123 if (unlikely(!ctx->mem_idx)) {
6124 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6127 switch (rB(ctx->opcode)) {
6129 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6130 cpu_gpr[rS(ctx->opcode)]);
6133 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6134 cpu_gpr[rS(ctx->opcode)]);
6137 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6143 /* TLB management - PowerPC 440 implementation */
6146 static void gen_tlbre_440(DisasContext *ctx)
6148 #if defined(CONFIG_USER_ONLY)
6149 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6151 if (unlikely(!ctx->mem_idx)) {
6152 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6155 switch (rB(ctx->opcode)) {
6160 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6161 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6162 t0, cpu_gpr[rA(ctx->opcode)]);
6163 tcg_temp_free_i32(t0);
6167 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6173 /* tlbsx - tlbsx. */
6174 static void gen_tlbsx_440(DisasContext *ctx)
6176 #if defined(CONFIG_USER_ONLY)
6177 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6180 if (unlikely(!ctx->mem_idx)) {
6181 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6184 t0 = tcg_temp_new();
6185 gen_addr_reg_index(ctx, t0);
6186 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6188 if (Rc(ctx->opcode)) {
6189 int l1 = gen_new_label();
6190 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6191 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6192 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6199 static void gen_tlbwe_440(DisasContext *ctx)
6201 #if defined(CONFIG_USER_ONLY)
6202 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6204 if (unlikely(!ctx->mem_idx)) {
6205 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6208 switch (rB(ctx->opcode)) {
6213 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6214 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6215 cpu_gpr[rS(ctx->opcode)]);
6216 tcg_temp_free_i32(t0);
6220 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6226 /* TLB management - PowerPC BookE 2.06 implementation */
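/* On BookE 2.06 the TLB is accessed through the MAS SPRs, so tlbre and tlbwe
 * take no GPR operands and operate entirely on the MAS state held in env;
 * tlbsx only needs the effective address to search for, built here from a
 * base GPR (or zero) plus rB.
 */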
6229 static void gen_tlbre_booke206(DisasContext *ctx)
6231 #if defined(CONFIG_USER_ONLY)
6232 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6234 if (unlikely(!ctx->mem_idx)) {
6235 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6239 gen_helper_booke206_tlbre(cpu_env);
6243 /* tlbsx - tlbsx. */
6244 static void gen_tlbsx_booke206(DisasContext *ctx)
6246 #if defined(CONFIG_USER_ONLY)
6247 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6250 if (unlikely(!ctx->mem_idx)) {
6251 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6255 if (rA(ctx->opcode)) {
6256 t0 = tcg_temp_new();
6257 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6259 t0 = tcg_const_tl(0);
6262 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6263 gen_helper_booke206_tlbsx(cpu_env, t0);
6268 static void gen_tlbwe_booke206(DisasContext *ctx)
6270 #if defined(CONFIG_USER_ONLY)
6271 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6273 if (unlikely(!ctx->mem_idx)) {
6274 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6277 gen_update_nip(ctx, ctx->nip - 4);
6278 gen_helper_booke206_tlbwe(cpu_env);
6282 static void gen_tlbivax_booke206(DisasContext *ctx)
6284 #if defined(CONFIG_USER_ONLY)
6285 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6288 if (unlikely(!ctx->mem_idx)) {
6289 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6293 t0 = tcg_temp_new();
6294 gen_addr_reg_index(ctx, t0);
6296 gen_helper_booke206_tlbivax(cpu_env, t0);
6300 static void gen_tlbilx_booke206(DisasContext *ctx)
6302 #if defined(CONFIG_USER_ONLY)
6303 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6306 if (unlikely(!ctx->mem_idx)) {
6307 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6311 t0 = tcg_temp_new();
6312 gen_addr_reg_index(ctx, t0);
6314 switch ((ctx->opcode >> 21) & 0x3) {
6316 gen_helper_booke206_tlbilx0(cpu_env, t0);
6319 gen_helper_booke206_tlbilx1(cpu_env, t0);
6322 gen_helper_booke206_tlbilx3(cpu_env, t0);
6325 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6335 static void gen_wrtee(DisasContext *ctx)
6337 #if defined(CONFIG_USER_ONLY)
6338 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6341 if (unlikely(!ctx->mem_idx)) {
6342 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6345 t0 = tcg_temp_new();
6346 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6347 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6348 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6350 /* Stop translation so that a pending interrupt gets a chance to be
6351  * raised if we have just set msr_ee to 1
6353 gen_stop_exception(ctx);
6358 static void gen_wrteei(DisasContext *ctx)
6360 #if defined(CONFIG_USER_ONLY)
6361 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6363 if (unlikely(!ctx->mem_idx)) {
6364 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6367 if (ctx->opcode & 0x00008000) {
6368 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6369 /* Stop translation to give a pending interrupt a chance to be raised */
6370 gen_stop_exception(ctx);
6372 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6377 /* PowerPC 440 specific instructions */
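/* dlmzb ("determine leftmost zero byte") is implemented entirely in a
 * helper; the Rc flag is passed down so the helper can update CR0 itself.
 */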
6380 static void gen_dlmzb(DisasContext *ctx)
6382 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6383 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6384 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6385 tcg_temp_free_i32(t0);
6388 /* mbar replaces eieio on 440 */
6389 static void gen_mbar(DisasContext *ctx)
6391 /* interpreted as no-op */
6394 /* msync replaces sync on 440 */
6395 static void gen_msync_4xx(DisasContext *ctx)
6397 /* interpreted as no-op */
6401 static void gen_icbt_440(DisasContext *ctx)
6403 /* interpreted as no-op */
6404 /* XXX: the specification says this is treated as a load by the MMU,
6405  * but it does not generate any exception
6409 /* Embedded.Processor Control */
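/* msgsnd and msgclr implement the embedded processor doorbell facility:
 * msgsnd sends the doorbell message described by rB and msgclr clears a
 * pending one; both are privileged.
 */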
6411 static void gen_msgclr(DisasContext *ctx)
6413 #if defined(CONFIG_USER_ONLY)
6414 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6416 if (unlikely(ctx->mem_idx == 0)) {
6417 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6421 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6425 static void gen_msgsnd(DisasContext *ctx)
6427 #if defined(CONFIG_USER_ONLY)
6428 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6430 if (unlikely(ctx->mem_idx == 0)) {
6431 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6435 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6439 /*** Altivec vector extension ***/
6440 /* Altivec register moves */
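/* Altivec registers are not exposed to TCG as 128-bit globals: the
 * translator either works on the 64-bit halves cpu_avrh/cpu_avrl directly or
 * passes a pointer to the whole env->avr[reg] entry to a helper via
 * gen_avr_ptr() (the caller frees it with tcg_temp_free_ptr).  Vector
 * loads/stores mask the EA down to a 16-byte boundary and transfer the two
 * halves in an endianness-dependent order.
 */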
6442 static inline TCGv_ptr gen_avr_ptr(int reg)
6444 TCGv_ptr r = tcg_temp_new_ptr();
6445 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6449 #define GEN_VR_LDX(name, opc2, opc3) \
6450 static void glue(gen_, name)(DisasContext *ctx) \
6453 if (unlikely(!ctx->altivec_enabled)) { \
6454 gen_exception(ctx, POWERPC_EXCP_VPU); \
6457 gen_set_access_type(ctx, ACCESS_INT); \
6458 EA = tcg_temp_new(); \
6459 gen_addr_reg_index(ctx, EA); \
6460 tcg_gen_andi_tl(EA, EA, ~0xf); \
6461 if (ctx->le_mode) { \
6462 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6463 tcg_gen_addi_tl(EA, EA, 8); \
6464 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6466 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6467 tcg_gen_addi_tl(EA, EA, 8); \
6468 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6470 tcg_temp_free(EA); \
6473 #define GEN_VR_STX(name, opc2, opc3) \
6474 static void gen_st##name(DisasContext *ctx) \
6477 if (unlikely(!ctx->altivec_enabled)) { \
6478 gen_exception(ctx, POWERPC_EXCP_VPU); \
6481 gen_set_access_type(ctx, ACCESS_INT); \
6482 EA = tcg_temp_new(); \
6483 gen_addr_reg_index(ctx, EA); \
6484 tcg_gen_andi_tl(EA, EA, ~0xf); \
6485 if (ctx->le_mode) { \
6486 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6487 tcg_gen_addi_tl(EA, EA, 8); \
6488 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6490 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6491 tcg_gen_addi_tl(EA, EA, 8); \
6492 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6494 tcg_temp_free(EA); \
6497 #define GEN_VR_LVE(name, opc2, opc3) \
6498 static void gen_lve##name(DisasContext *ctx) \
6502 if (unlikely(!ctx->altivec_enabled)) { \
6503 gen_exception(ctx, POWERPC_EXCP_VPU); \
6506 gen_set_access_type(ctx, ACCESS_INT); \
6507 EA = tcg_temp_new(); \
6508 gen_addr_reg_index(ctx, EA); \
6509 rs = gen_avr_ptr(rS(ctx->opcode)); \
6510 gen_helper_lve##name(cpu_env, rs, EA); \
6511 tcg_temp_free(EA); \
6512 tcg_temp_free_ptr(rs); \
6515 #define GEN_VR_STVE(name, opc2, opc3) \
6516 static void gen_stve##name(DisasContext *ctx) \
6520 if (unlikely(!ctx->altivec_enabled)) { \
6521 gen_exception(ctx, POWERPC_EXCP_VPU); \
6524 gen_set_access_type(ctx, ACCESS_INT); \
6525 EA = tcg_temp_new(); \
6526 gen_addr_reg_index(ctx, EA); \
6527 rs = gen_avr_ptr(rS(ctx->opcode)); \
6528 gen_helper_stve##name(cpu_env, rs, EA); \
6529 tcg_temp_free(EA); \
6530 tcg_temp_free_ptr(rs); \
6533 GEN_VR_LDX(lvx, 0x07, 0x03);
6534 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6535 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6537 GEN_VR_LVE(bx, 0x07, 0x00);
6538 GEN_VR_LVE(hx, 0x07, 0x01);
6539 GEN_VR_LVE(wx, 0x07, 0x02);
6541 GEN_VR_STX(svx, 0x07, 0x07);
6542 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6543 GEN_VR_STX(svxl, 0x07, 0x0F);
6545 GEN_VR_STVE(bx, 0x07, 0x04);
6546 GEN_VR_STVE(hx, 0x07, 0x05);
6547 GEN_VR_STVE(wx, 0x07, 0x06);
6549 static void gen_lvsl(DisasContext *ctx)
6553 if (unlikely(!ctx->altivec_enabled)) {
6554 gen_exception(ctx, POWERPC_EXCP_VPU);
6557 EA = tcg_temp_new();
6558 gen_addr_reg_index(ctx, EA);
6559 rd = gen_avr_ptr(rD(ctx->opcode));
6560 gen_helper_lvsl(rd, EA);
6562 tcg_temp_free_ptr(rd);
6565 static void gen_lvsr(DisasContext *ctx)
6569 if (unlikely(!ctx->altivec_enabled)) {
6570 gen_exception(ctx, POWERPC_EXCP_VPU);
6573 EA = tcg_temp_new();
6574 gen_addr_reg_index(ctx, EA);
6575 rd = gen_avr_ptr(rD(ctx->opcode));
6576 gen_helper_lvsr(rd, EA);
6578 tcg_temp_free_ptr(rd);
6581 static void gen_mfvscr(DisasContext *ctx)
6584 if (unlikely(!ctx->altivec_enabled)) {
6585 gen_exception(ctx, POWERPC_EXCP_VPU);
6588 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6589 t = tcg_temp_new_i32();
6590 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
6591 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6592 tcg_temp_free_i32(t);
6595 static void gen_mtvscr(DisasContext *ctx)
6598 if (unlikely(!ctx->altivec_enabled)) {
6599 gen_exception(ctx, POWERPC_EXCP_VPU);
6602 p = gen_avr_ptr(rD(ctx->opcode));
6603 gen_helper_mtvscr(cpu_env, p);
6604 tcg_temp_free_ptr(p);
6607 /* Logical operations */
6608 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6609 static void glue(gen_, name)(DisasContext *ctx) \
6611 if (unlikely(!ctx->altivec_enabled)) { \
6612 gen_exception(ctx, POWERPC_EXCP_VPU); \
6615 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6616 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6619 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6620 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6621 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6622 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6623 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6625 #define GEN_VXFORM(name, opc2, opc3) \
6626 static void glue(gen_, name)(DisasContext *ctx) \
6628 TCGv_ptr ra, rb, rd; \
6629 if (unlikely(!ctx->altivec_enabled)) { \
6630 gen_exception(ctx, POWERPC_EXCP_VPU); \
6633 ra = gen_avr_ptr(rA(ctx->opcode)); \
6634 rb = gen_avr_ptr(rB(ctx->opcode)); \
6635 rd = gen_avr_ptr(rD(ctx->opcode)); \
6636 gen_helper_##name (rd, ra, rb); \
6637 tcg_temp_free_ptr(ra); \
6638 tcg_temp_free_ptr(rb); \
6639 tcg_temp_free_ptr(rd); \
6642 #define GEN_VXFORM_ENV(name, opc2, opc3) \
6643 static void glue(gen_, name)(DisasContext *ctx) \
6645 TCGv_ptr ra, rb, rd; \
6646 if (unlikely(!ctx->altivec_enabled)) { \
6647 gen_exception(ctx, POWERPC_EXCP_VPU); \
6650 ra = gen_avr_ptr(rA(ctx->opcode)); \
6651 rb = gen_avr_ptr(rB(ctx->opcode)); \
6652 rd = gen_avr_ptr(rD(ctx->opcode)); \
6653 gen_helper_##name(cpu_env, rd, ra, rb); \
6654 tcg_temp_free_ptr(ra); \
6655 tcg_temp_free_ptr(rb); \
6656 tcg_temp_free_ptr(rd); \
6659 GEN_VXFORM(vaddubm, 0, 0);
6660 GEN_VXFORM(vadduhm, 0, 1);
6661 GEN_VXFORM(vadduwm, 0, 2);
6662 GEN_VXFORM(vsububm, 0, 16);
6663 GEN_VXFORM(vsubuhm, 0, 17);
6664 GEN_VXFORM(vsubuwm, 0, 18);
6665 GEN_VXFORM(vmaxub, 1, 0);
6666 GEN_VXFORM(vmaxuh, 1, 1);
6667 GEN_VXFORM(vmaxuw, 1, 2);
6668 GEN_VXFORM(vmaxsb, 1, 4);
6669 GEN_VXFORM(vmaxsh, 1, 5);
6670 GEN_VXFORM(vmaxsw, 1, 6);
6671 GEN_VXFORM(vminub, 1, 8);
6672 GEN_VXFORM(vminuh, 1, 9);
6673 GEN_VXFORM(vminuw, 1, 10);
6674 GEN_VXFORM(vminsb, 1, 12);
6675 GEN_VXFORM(vminsh, 1, 13);
6676 GEN_VXFORM(vminsw, 1, 14);
6677 GEN_VXFORM(vavgub, 1, 16);
6678 GEN_VXFORM(vavguh, 1, 17);
6679 GEN_VXFORM(vavguw, 1, 18);
6680 GEN_VXFORM(vavgsb, 1, 20);
6681 GEN_VXFORM(vavgsh, 1, 21);
6682 GEN_VXFORM(vavgsw, 1, 22);
6683 GEN_VXFORM(vmrghb, 6, 0);
6684 GEN_VXFORM(vmrghh, 6, 1);
6685 GEN_VXFORM(vmrghw, 6, 2);
6686 GEN_VXFORM(vmrglb, 6, 4);
6687 GEN_VXFORM(vmrglh, 6, 5);
6688 GEN_VXFORM(vmrglw, 6, 6);
6689 GEN_VXFORM(vmuloub, 4, 0);
6690 GEN_VXFORM(vmulouh, 4, 1);
6691 GEN_VXFORM(vmulosb, 4, 4);
6692 GEN_VXFORM(vmulosh, 4, 5);
6693 GEN_VXFORM(vmuleub, 4, 8);
6694 GEN_VXFORM(vmuleuh, 4, 9);
6695 GEN_VXFORM(vmulesb, 4, 12);
6696 GEN_VXFORM(vmulesh, 4, 13);
6697 GEN_VXFORM(vslb, 2, 4);
6698 GEN_VXFORM(vslh, 2, 5);
6699 GEN_VXFORM(vslw, 2, 6);
6700 GEN_VXFORM(vsrb, 2, 8);
6701 GEN_VXFORM(vsrh, 2, 9);
6702 GEN_VXFORM(vsrw, 2, 10);
6703 GEN_VXFORM(vsrab, 2, 12);
6704 GEN_VXFORM(vsrah, 2, 13);
6705 GEN_VXFORM(vsraw, 2, 14);
6706 GEN_VXFORM(vslo, 6, 16);
6707 GEN_VXFORM(vsro, 6, 17);
6708 GEN_VXFORM(vaddcuw, 0, 6);
6709 GEN_VXFORM(vsubcuw, 0, 22);
6710 GEN_VXFORM_ENV(vaddubs, 0, 8);
6711 GEN_VXFORM_ENV(vadduhs, 0, 9);
6712 GEN_VXFORM_ENV(vadduws, 0, 10);
6713 GEN_VXFORM_ENV(vaddsbs, 0, 12);
6714 GEN_VXFORM_ENV(vaddshs, 0, 13);
6715 GEN_VXFORM_ENV(vaddsws, 0, 14);
6716 GEN_VXFORM_ENV(vsububs, 0, 24);
6717 GEN_VXFORM_ENV(vsubuhs, 0, 25);
6718 GEN_VXFORM_ENV(vsubuws, 0, 26);
6719 GEN_VXFORM_ENV(vsubsbs, 0, 28);
6720 GEN_VXFORM_ENV(vsubshs, 0, 29);
6721 GEN_VXFORM_ENV(vsubsws, 0, 30);
6722 GEN_VXFORM(vrlb, 2, 0);
6723 GEN_VXFORM(vrlh, 2, 1);
6724 GEN_VXFORM(vrlw, 2, 2);
6725 GEN_VXFORM(vsl, 2, 7);
6726 GEN_VXFORM(vsr, 2, 11);
6727 GEN_VXFORM_ENV(vpkuhum, 7, 0);
6728 GEN_VXFORM_ENV(vpkuwum, 7, 1);
6729 GEN_VXFORM_ENV(vpkuhus, 7, 2);
6730 GEN_VXFORM_ENV(vpkuwus, 7, 3);
6731 GEN_VXFORM_ENV(vpkshus, 7, 4);
6732 GEN_VXFORM_ENV(vpkswus, 7, 5);
6733 GEN_VXFORM_ENV(vpkshss, 7, 6);
6734 GEN_VXFORM_ENV(vpkswss, 7, 7);
6735 GEN_VXFORM(vpkpx, 7, 12);
6736 GEN_VXFORM_ENV(vsum4ubs, 4, 24);
6737 GEN_VXFORM_ENV(vsum4sbs, 4, 28);
6738 GEN_VXFORM_ENV(vsum4shs, 4, 25);
6739 GEN_VXFORM_ENV(vsum2sws, 4, 26);
6740 GEN_VXFORM_ENV(vsumsws, 4, 30);
6741 GEN_VXFORM_ENV(vaddfp, 5, 0);
6742 GEN_VXFORM_ENV(vsubfp, 5, 1);
6743 GEN_VXFORM_ENV(vmaxfp, 5, 16);
6744 GEN_VXFORM_ENV(vminfp, 5, 17);
6746 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6747 static void glue(gen_, name)(DisasContext *ctx) \
6749 TCGv_ptr ra, rb, rd; \
6750 if (unlikely(!ctx->altivec_enabled)) { \
6751 gen_exception(ctx, POWERPC_EXCP_VPU); \
6754 ra = gen_avr_ptr(rA(ctx->opcode)); \
6755 rb = gen_avr_ptr(rB(ctx->opcode)); \
6756 rd = gen_avr_ptr(rD(ctx->opcode)); \
6757 gen_helper_##opname(cpu_env, rd, ra, rb); \
6758 tcg_temp_free_ptr(ra); \
6759 tcg_temp_free_ptr(rb); \
6760 tcg_temp_free_ptr(rd); \
6763 #define GEN_VXRFORM(name, opc2, opc3) \
6764 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6765 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
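/* Each GEN_VXRFORM use expands to two handlers: the plain compare and the
 * record form (e.g. "vcmpequb.") at opc3 | 0x10, whose *_dot helper also
 * sets CR field 6 with the all-true/all-false summary of the comparison.
 */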
6767 GEN_VXRFORM(vcmpequb, 3, 0)
6768 GEN_VXRFORM(vcmpequh, 3, 1)
6769 GEN_VXRFORM(vcmpequw, 3, 2)
6770 GEN_VXRFORM(vcmpgtsb, 3, 12)
6771 GEN_VXRFORM(vcmpgtsh, 3, 13)
6772 GEN_VXRFORM(vcmpgtsw, 3, 14)
6773 GEN_VXRFORM(vcmpgtub, 3, 8)
6774 GEN_VXRFORM(vcmpgtuh, 3, 9)
6775 GEN_VXRFORM(vcmpgtuw, 3, 10)
6776 GEN_VXRFORM(vcmpeqfp, 3, 3)
6777 GEN_VXRFORM(vcmpgefp, 3, 7)
6778 GEN_VXRFORM(vcmpgtfp, 3, 11)
6779 GEN_VXRFORM(vcmpbfp, 3, 15)
6781 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6782 static void glue(gen_, name)(DisasContext *ctx) \
6786 if (unlikely(!ctx->altivec_enabled)) { \
6787 gen_exception(ctx, POWERPC_EXCP_VPU); \
6790 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6791 rd = gen_avr_ptr(rD(ctx->opcode)); \
6792         gen_helper_##name(rd, simm);                                  \
6793 tcg_temp_free_i32(simm); \
6794 tcg_temp_free_ptr(rd); \
6797 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6798 GEN_VXFORM_SIMM(vspltish, 6, 13);
6799 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6801 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6802 static void glue(gen_, name)(DisasContext *ctx) \
6805 if (unlikely(!ctx->altivec_enabled)) { \
6806 gen_exception(ctx, POWERPC_EXCP_VPU); \
6809 rb = gen_avr_ptr(rB(ctx->opcode)); \
6810 rd = gen_avr_ptr(rD(ctx->opcode)); \
6811         gen_helper_##name(rd, rb);                                    \
6812 tcg_temp_free_ptr(rb); \
6813 tcg_temp_free_ptr(rd); \
6816 #define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
6817 static void glue(gen_, name)(DisasContext *ctx) \
6821 if (unlikely(!ctx->altivec_enabled)) { \
6822 gen_exception(ctx, POWERPC_EXCP_VPU); \
6825 rb = gen_avr_ptr(rB(ctx->opcode)); \
6826 rd = gen_avr_ptr(rD(ctx->opcode)); \
6827 gen_helper_##name(cpu_env, rd, rb); \
6828 tcg_temp_free_ptr(rb); \
6829 tcg_temp_free_ptr(rd); \
6832 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6833 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6834 GEN_VXFORM_NOA(vupklsb, 7, 10);
6835 GEN_VXFORM_NOA(vupklsh, 7, 11);
6836 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6837 GEN_VXFORM_NOA(vupklpx, 7, 15);
6838 GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
6839 GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
6840 GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
6841 GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
6842 GEN_VXFORM_NOA_ENV(vrfim, 5, 8);
6843 GEN_VXFORM_NOA_ENV(vrfin, 5, 9);
6844 GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
6845 GEN_VXFORM_NOA_ENV(vrfiz, 5, 11);
6863 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6864 static void glue(gen_, name)(DisasContext *ctx) \
6868 if (unlikely(!ctx->altivec_enabled)) { \
6869 gen_exception(ctx, POWERPC_EXCP_VPU); \
6872 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6873 rb = gen_avr_ptr(rB(ctx->opcode)); \
6874 rd = gen_avr_ptr(rD(ctx->opcode)); \
6875         gen_helper_##name(rd, rb, uimm);                              \
6876 tcg_temp_free_i32(uimm); \
6877 tcg_temp_free_ptr(rb); \
6878 tcg_temp_free_ptr(rd); \
6881 #define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
6882 static void glue(gen_, name)(DisasContext *ctx) \
6887 if (unlikely(!ctx->altivec_enabled)) { \
6888 gen_exception(ctx, POWERPC_EXCP_VPU); \
6891 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6892 rb = gen_avr_ptr(rB(ctx->opcode)); \
6893 rd = gen_avr_ptr(rD(ctx->opcode)); \
6894 gen_helper_##name(cpu_env, rd, rb, uimm); \
6895 tcg_temp_free_i32(uimm); \
6896 tcg_temp_free_ptr(rb); \
6897 tcg_temp_free_ptr(rd); \
6900 GEN_VXFORM_UIMM(vspltb, 6, 8);
6901 GEN_VXFORM_UIMM(vsplth, 6, 9);
6902 GEN_VXFORM_UIMM(vspltw, 6, 10);
6903 GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
6904 GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
6905 GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
6906 GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
6908 static void gen_vsldoi(DisasContext *ctx)
6910 TCGv_ptr ra, rb, rd;
6912 if (unlikely(!ctx->altivec_enabled)) {
6913 gen_exception(ctx, POWERPC_EXCP_VPU);
6916 ra = gen_avr_ptr(rA(ctx->opcode));
6917 rb = gen_avr_ptr(rB(ctx->opcode));
6918 rd = gen_avr_ptr(rD(ctx->opcode));
6919 sh = tcg_const_i32(VSH(ctx->opcode));
6920     gen_helper_vsldoi(rd, ra, rb, sh);
6921 tcg_temp_free_ptr(ra);
6922 tcg_temp_free_ptr(rb);
6923 tcg_temp_free_ptr(rd);
6924 tcg_temp_free_i32(sh);
6927 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6928 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6930 TCGv_ptr ra, rb, rc, rd; \
6931 if (unlikely(!ctx->altivec_enabled)) { \
6932 gen_exception(ctx, POWERPC_EXCP_VPU); \
6935 ra = gen_avr_ptr(rA(ctx->opcode)); \
6936 rb = gen_avr_ptr(rB(ctx->opcode)); \
6937 rc = gen_avr_ptr(rC(ctx->opcode)); \
6938 rd = gen_avr_ptr(rD(ctx->opcode)); \
6939 if (Rc(ctx->opcode)) { \
6940 gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
6942 gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
6944 tcg_temp_free_ptr(ra); \
6945 tcg_temp_free_ptr(rb); \
6946 tcg_temp_free_ptr(rc); \
6947 tcg_temp_free_ptr(rd); \
6950 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6952 static void gen_vmladduhm(DisasContext *ctx)
6954 TCGv_ptr ra, rb, rc, rd;
6955 if (unlikely(!ctx->altivec_enabled)) {
6956 gen_exception(ctx, POWERPC_EXCP_VPU);
6959 ra = gen_avr_ptr(rA(ctx->opcode));
6960 rb = gen_avr_ptr(rB(ctx->opcode));
6961 rc = gen_avr_ptr(rC(ctx->opcode));
6962 rd = gen_avr_ptr(rD(ctx->opcode));
6963 gen_helper_vmladduhm(rd, ra, rb, rc);
6964 tcg_temp_free_ptr(ra);
6965 tcg_temp_free_ptr(rb);
6966 tcg_temp_free_ptr(rc);
6967 tcg_temp_free_ptr(rd);
6970 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
6971 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
6972 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
6973 GEN_VAFORM_PAIRED(vsel, vperm, 21)
6974 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
6976 /*** SPE extension ***/
6977 /* Register moves */
6980 static inline void gen_evmra(DisasContext *ctx)
6983 if (unlikely(!ctx->spe_enabled)) {
6984 gen_exception(ctx, POWERPC_EXCP_SPEU);
6988 #if defined(TARGET_PPC64)
6990 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6993 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6995 offsetof(CPUPPCState, spe_acc));
6997 TCGv_i64 tmp = tcg_temp_new_i64();
6999         /* tmp := rA_lo | (rA_hi << 32) */
7000 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7002 /* spe_acc := tmp */
7003 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
7004 tcg_temp_free_i64(tmp);
7007 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7008 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7012 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
7014 #if defined(TARGET_PPC64)
7015 tcg_gen_mov_i64(t, cpu_gpr[reg]);
7017 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
7021 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
7023 #if defined(TARGET_PPC64)
7024 tcg_gen_mov_i64(cpu_gpr[reg], t);
7026 TCGv_i64 tmp = tcg_temp_new_i64();
7027 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
7028 tcg_gen_shri_i64(tmp, t, 32);
7029 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
7030 tcg_temp_free_i64(tmp);
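/* On 32-bit targets a 64-bit SPE register is split across two globals:
 * cpu_gpr[n] holds the low word and cpu_gprh[n] the high word.  The two
 * helpers above hide that split; as a sketch of the generated ops:
 *
 *   gen_load_gpr64(t, n)   =>  t = ((uint64_t)gprh[n] << 32) | gpr[n]
 *   gen_store_gpr64(n, t)  =>  gpr[n] = (uint32_t)t; gprh[n] = t >> 32
 *
 * On TARGET_PPC64 both collapse to plain 64-bit moves. */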
7034 #define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
7035 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
7037 if (Rc(ctx->opcode)) \
7043 /* Handler for undefined SPE opcodes */
7044 static inline void gen_speundef(DisasContext *ctx)
7046 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7050 #if defined(TARGET_PPC64)
7051 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
7052 static inline void gen_##name(DisasContext *ctx) \
7054 if (unlikely(!ctx->spe_enabled)) { \
7055 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7058 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7059 cpu_gpr[rB(ctx->opcode)]); \
7062 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
7063 static inline void gen_##name(DisasContext *ctx) \
7065 if (unlikely(!ctx->spe_enabled)) { \
7066 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7069 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7070 cpu_gpr[rB(ctx->opcode)]); \
7071 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7072 cpu_gprh[rB(ctx->opcode)]); \
7076 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
7077 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
7078 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
7079 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
7080 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
7081 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
7082 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
7083 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
7085 /* SPE logic immediate */
7086 #if defined(TARGET_PPC64)
7087 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
7088 static inline void gen_##name(DisasContext *ctx) \
7090 if (unlikely(!ctx->spe_enabled)) { \
7091 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7094 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7095 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7096 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7097 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7098 tcg_opi(t0, t0, rB(ctx->opcode)); \
7099 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7100 tcg_gen_trunc_i64_i32(t1, t2); \
7101 tcg_temp_free_i64(t2); \
7102 tcg_opi(t1, t1, rB(ctx->opcode)); \
7103 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7104 tcg_temp_free_i32(t0); \
7105 tcg_temp_free_i32(t1); \
7108 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
7109 static inline void gen_##name(DisasContext *ctx) \
7111 if (unlikely(!ctx->spe_enabled)) { \
7112 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7115 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7117 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7121 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
7122 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
7123 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
7124 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
7126 /* SPE arithmetic */
7127 #if defined(TARGET_PPC64)
7128 #define GEN_SPEOP_ARITH1(name, tcg_op) \
7129 static inline void gen_##name(DisasContext *ctx) \
7131 if (unlikely(!ctx->spe_enabled)) { \
7132 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7135 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7136 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7137 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7138 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7140 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7141 tcg_gen_trunc_i64_i32(t1, t2); \
7142 tcg_temp_free_i64(t2); \
7144 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7145 tcg_temp_free_i32(t0); \
7146 tcg_temp_free_i32(t1); \
7149 #define GEN_SPEOP_ARITH1(name, tcg_op) \
7150 static inline void gen_##name(DisasContext *ctx) \
7152 if (unlikely(!ctx->spe_enabled)) { \
7153 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7156 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
7157 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
7161 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
7163 int l1 = gen_new_label();
7164 int l2 = gen_new_label();
7166 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
7167 tcg_gen_neg_i32(ret, arg1);
7170 tcg_gen_mov_i32(ret, arg1);
7173 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
7174 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
7175 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
7176 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
7177 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
7179 tcg_gen_addi_i32(ret, arg1, 0x8000);
7180 tcg_gen_ext16u_i32(ret, ret);
7182 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
7183 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
7184 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
7186 #if defined(TARGET_PPC64)
7187 #define GEN_SPEOP_ARITH2(name, tcg_op) \
7188 static inline void gen_##name(DisasContext *ctx) \
7190 if (unlikely(!ctx->spe_enabled)) { \
7191 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7194 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7195 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7196 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
7197 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
7198 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7199 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
7200 tcg_op(t0, t0, t2); \
7201 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
7202 tcg_gen_trunc_i64_i32(t1, t3); \
7203 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
7204 tcg_gen_trunc_i64_i32(t2, t3); \
7205 tcg_temp_free_i64(t3); \
7206 tcg_op(t1, t1, t2); \
7207 tcg_temp_free_i32(t2); \
7208 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7209 tcg_temp_free_i32(t0); \
7210 tcg_temp_free_i32(t1); \
7213 #define GEN_SPEOP_ARITH2(name, tcg_op) \
7214 static inline void gen_##name(DisasContext *ctx) \
7216 if (unlikely(!ctx->spe_enabled)) { \
7217 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7220 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7221 cpu_gpr[rB(ctx->opcode)]); \
7222 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7223 cpu_gprh[rB(ctx->opcode)]); \
7227 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7232 l1 = gen_new_label();
7233 l2 = gen_new_label();
7234 t0 = tcg_temp_local_new_i32();
7235 /* No error here: 6 bits are used */
7236 tcg_gen_andi_i32(t0, arg2, 0x3F);
7237 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
7238 tcg_gen_shr_i32(ret, arg1, t0);
7241 tcg_gen_movi_i32(ret, 0);
7243 tcg_temp_free_i32(t0);
7245 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
7246 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7251 l1 = gen_new_label();
7252 l2 = gen_new_label();
7253 t0 = tcg_temp_local_new_i32();
7254 /* No error here: 6 bits are used */
7255 tcg_gen_andi_i32(t0, arg2, 0x3F);
7256 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
7257 tcg_gen_sar_i32(ret, arg1, t0);
7260 tcg_gen_movi_i32(ret, 0);
7262 tcg_temp_free_i32(t0);
7264 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
7265 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7270 l1 = gen_new_label();
7271 l2 = gen_new_label();
7272 t0 = tcg_temp_local_new_i32();
7273 /* No error here: 6 bits are used */
7274 tcg_gen_andi_i32(t0, arg2, 0x3F);
7275 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
7276 tcg_gen_shl_i32(ret, arg1, t0);
7279 tcg_gen_movi_i32(ret, 0);
7281 tcg_temp_free_i32(t0);
7283 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
7284 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7286 TCGv_i32 t0 = tcg_temp_new_i32();
7287 tcg_gen_andi_i32(t0, arg2, 0x1F);
7288 tcg_gen_rotl_i32(ret, arg1, t0);
7289 tcg_temp_free_i32(t0);
7291 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
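/* The three shift helpers above keep the low 6 bits of the shift count and
 * branch on it: counts of 32..63 take the "result is zero" path, so each
 * 32-bit element is shifted by at most 31 bits.  gen_op_evrlw instead masks
 * the count to 5 bits, a rotate by 32 being a no-op. */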
7292 static inline void gen_evmergehi(DisasContext *ctx)
7294 if (unlikely(!ctx->spe_enabled)) {
7295 gen_exception(ctx, POWERPC_EXCP_SPEU);
7298 #if defined(TARGET_PPC64)
7299 TCGv t0 = tcg_temp_new();
7300 TCGv t1 = tcg_temp_new();
7301 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7302     tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7303 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7307 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7308 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7311 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
7312 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7314 tcg_gen_sub_i32(ret, arg2, arg1);
7316 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
7318 /* SPE arithmetic immediate */
7319 #if defined(TARGET_PPC64)
7320 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7321 static inline void gen_##name(DisasContext *ctx) \
7323 if (unlikely(!ctx->spe_enabled)) { \
7324 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7327 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7328 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7329 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7330 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7331 tcg_op(t0, t0, rA(ctx->opcode)); \
7332 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7333 tcg_gen_trunc_i64_i32(t1, t2); \
7334 tcg_temp_free_i64(t2); \
7335 tcg_op(t1, t1, rA(ctx->opcode)); \
7336 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7337 tcg_temp_free_i32(t0); \
7338 tcg_temp_free_i32(t1); \
7341 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7342 static inline void gen_##name(DisasContext *ctx) \
7344 if (unlikely(!ctx->spe_enabled)) { \
7345 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7348 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
7350 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
7354 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
7355 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
7357 /* SPE comparison */
7358 #if defined(TARGET_PPC64)
7359 #define GEN_SPEOP_COMP(name, tcg_cond) \
7360 static inline void gen_##name(DisasContext *ctx) \
7362 if (unlikely(!ctx->spe_enabled)) { \
7363 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7366 int l1 = gen_new_label(); \
7367 int l2 = gen_new_label(); \
7368 int l3 = gen_new_label(); \
7369 int l4 = gen_new_label(); \
7370 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7371 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7372 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7373 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7374 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7375 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7376 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7378 gen_set_label(l1); \
7379 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7380 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7381 gen_set_label(l2); \
7382 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7383 tcg_gen_trunc_i64_i32(t0, t2); \
7384 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7385 tcg_gen_trunc_i64_i32(t1, t2); \
7386 tcg_temp_free_i64(t2); \
7387 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7388 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7389 ~(CRF_CH | CRF_CH_AND_CL)); \
7391 gen_set_label(l3); \
7392 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7393 CRF_CH | CRF_CH_OR_CL); \
7394 gen_set_label(l4); \
7395 tcg_temp_free_i32(t0); \
7396 tcg_temp_free_i32(t1); \
7399 #define GEN_SPEOP_COMP(name, tcg_cond) \
7400 static inline void gen_##name(DisasContext *ctx) \
7402 if (unlikely(!ctx->spe_enabled)) { \
7403 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7406 int l1 = gen_new_label(); \
7407 int l2 = gen_new_label(); \
7408 int l3 = gen_new_label(); \
7409 int l4 = gen_new_label(); \
7411 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7412 cpu_gpr[rB(ctx->opcode)], l1); \
7413 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7415 gen_set_label(l1); \
7416 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7417 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7418 gen_set_label(l2); \
7419 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7420 cpu_gprh[rB(ctx->opcode)], l3); \
7421 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7422 ~(CRF_CH | CRF_CH_AND_CL)); \
7424 gen_set_label(l3); \
7425 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7426 CRF_CH | CRF_CH_OR_CL); \
7427 gen_set_label(l4); \
7430 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7431 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7432 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7433 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7434 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
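/* The SPE compares above write a whole CR field: from the way the bits are
 * set, CRF_CH appears to report the result for the high-word element and
 * CRF_CL the result for the low-word element, with CRF_CH_OR_CL and
 * CRF_CH_AND_CL as the usual "any"/"all" summary bits. */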
7437 static inline void gen_brinc(DisasContext *ctx)
7439 /* Note: brinc is usable even if SPE is disabled */
7440 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
7441 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7443 static inline void gen_evmergelo(DisasContext *ctx)
7445 if (unlikely(!ctx->spe_enabled)) {
7446 gen_exception(ctx, POWERPC_EXCP_SPEU);
7449 #if defined(TARGET_PPC64)
7450 TCGv t0 = tcg_temp_new();
7451 TCGv t1 = tcg_temp_new();
7452 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7453 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7454 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7458 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7459 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7462 static inline void gen_evmergehilo(DisasContext *ctx)
7464 if (unlikely(!ctx->spe_enabled)) {
7465 gen_exception(ctx, POWERPC_EXCP_SPEU);
7468 #if defined(TARGET_PPC64)
7469 TCGv t0 = tcg_temp_new();
7470 TCGv t1 = tcg_temp_new();
7471 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7472     tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7473 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7477 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7478 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7481 static inline void gen_evmergelohi(DisasContext *ctx)
7483 if (unlikely(!ctx->spe_enabled)) {
7484 gen_exception(ctx, POWERPC_EXCP_SPEU);
7487 #if defined(TARGET_PPC64)
7488 TCGv t0 = tcg_temp_new();
7489 TCGv t1 = tcg_temp_new();
7490 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7491 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7492 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7496 if (rD(ctx->opcode) == rA(ctx->opcode)) {
7497 TCGv_i32 tmp = tcg_temp_new_i32();
7498 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
7499 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7500 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
7501 tcg_temp_free_i32(tmp);
7503 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7504 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7508 static inline void gen_evsplati(DisasContext *ctx)
7510 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
7512 #if defined(TARGET_PPC64)
7513 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7515 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7516 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7519 static inline void gen_evsplatfi(DisasContext *ctx)
7521 uint64_t imm = rA(ctx->opcode) << 27;
7523 #if defined(TARGET_PPC64)
7524 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7526 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7527 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
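/* evsplati sign-extends the 5-bit immediate (shifting an int32_t left by 27
 * and arithmetically back right by 27), whereas evsplatfi left-justifies it
 * as a fractional value (imm << 27); both then replicate the same 32-bit
 * value into the high and low words of rD. */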
7531 static inline void gen_evsel(DisasContext *ctx)
7533 int l1 = gen_new_label();
7534 int l2 = gen_new_label();
7535 int l3 = gen_new_label();
7536 int l4 = gen_new_label();
7537 TCGv_i32 t0 = tcg_temp_local_new_i32();
7538 #if defined(TARGET_PPC64)
7539 TCGv t1 = tcg_temp_local_new();
7540 TCGv t2 = tcg_temp_local_new();
7542 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7543 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7544 #if defined(TARGET_PPC64)
7545 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7547 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7551 #if defined(TARGET_PPC64)
7552 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7554 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7557 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7558 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7559 #if defined(TARGET_PPC64)
7560 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7562 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7566 #if defined(TARGET_PPC64)
7567 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7569 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7572 tcg_temp_free_i32(t0);
7573 #if defined(TARGET_PPC64)
7574 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7580 static void gen_evsel0(DisasContext *ctx)
7585 static void gen_evsel1(DisasContext *ctx)
7590 static void gen_evsel2(DisasContext *ctx)
7595 static void gen_evsel3(DisasContext *ctx)
7602 static inline void gen_evmwumi(DisasContext *ctx)
7606 if (unlikely(!ctx->spe_enabled)) {
7607 gen_exception(ctx, POWERPC_EXCP_SPEU);
7611 t0 = tcg_temp_new_i64();
7612 t1 = tcg_temp_new_i64();
7614 /* t0 := rA; t1 := rB */
7615 #if defined(TARGET_PPC64)
7616 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7617 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7619 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7620 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7623 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7625 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7627 tcg_temp_free_i64(t0);
7628 tcg_temp_free_i64(t1);
7631 static inline void gen_evmwumia(DisasContext *ctx)
7635 if (unlikely(!ctx->spe_enabled)) {
7636 gen_exception(ctx, POWERPC_EXCP_SPEU);
7640 gen_evmwumi(ctx); /* rD := rA * rB */
7642 tmp = tcg_temp_new_i64();
7645 gen_load_gpr64(tmp, rD(ctx->opcode));
7646 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
7647 tcg_temp_free_i64(tmp);
7650 static inline void gen_evmwumiaa(DisasContext *ctx)
7655 if (unlikely(!ctx->spe_enabled)) {
7656 gen_exception(ctx, POWERPC_EXCP_SPEU);
7660 gen_evmwumi(ctx); /* rD := rA * rB */
7662 acc = tcg_temp_new_i64();
7663 tmp = tcg_temp_new_i64();
7666 gen_load_gpr64(tmp, rD(ctx->opcode));
7669 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7671 /* acc := tmp + acc */
7672 tcg_gen_add_i64(acc, acc, tmp);
7675 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7678 gen_store_gpr64(rD(ctx->opcode), acc);
7680 tcg_temp_free_i64(acc);
7681 tcg_temp_free_i64(tmp);
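/* gen_evmwumiaa thus expands to: rD := rA * rB (via gen_evmwumi above),
 * ACC := ACC + rD, rD := ACC, with the accumulator kept in env->spe_acc.
 * gen_evmwsmiaa below does the same for the signed multiply. */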
7684 static inline void gen_evmwsmi(DisasContext *ctx)
7688 if (unlikely(!ctx->spe_enabled)) {
7689 gen_exception(ctx, POWERPC_EXCP_SPEU);
7693 t0 = tcg_temp_new_i64();
7694 t1 = tcg_temp_new_i64();
7696 /* t0 := rA; t1 := rB */
7697 #if defined(TARGET_PPC64)
7698 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7699 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7701 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7702 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7705 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7707 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7709 tcg_temp_free_i64(t0);
7710 tcg_temp_free_i64(t1);
7713 static inline void gen_evmwsmia(DisasContext *ctx)
7717 gen_evmwsmi(ctx); /* rD := rA * rB */
7719 tmp = tcg_temp_new_i64();
7722 gen_load_gpr64(tmp, rD(ctx->opcode));
7723 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
7725 tcg_temp_free_i64(tmp);
7728 static inline void gen_evmwsmiaa(DisasContext *ctx)
7730     TCGv_i64 acc;
7731     TCGv_i64 tmp;
7733 gen_evmwsmi(ctx); /* rD := rA * rB */
7735 acc = tcg_temp_new_i64();
7736 tmp = tcg_temp_new_i64();
7739 gen_load_gpr64(tmp, rD(ctx->opcode));
7742 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7744 /* acc := tmp + acc */
7745 tcg_gen_add_i64(acc, acc, tmp);
7748 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7751 gen_store_gpr64(rD(ctx->opcode), acc);
7753 tcg_temp_free_i64(acc);
7754 tcg_temp_free_i64(tmp);
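/* Each GEN_SPE line below registers a pair of handlers at (opc2, opc3): per
 * the GEN_SPE macro above, the opcode's Rc bit selects gen_<name0> versus
 * gen_<name1>.  The two hex masks appear to be the per-form invalid-bit
 * patterns used when the opcodes are registered. */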
7757 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7758 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7759 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7760 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7761 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
7762 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
7763 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
7764 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
7765 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
7766 GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
7767 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7768 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7769 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7770 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7771 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7772 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
7773 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
7774 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7775 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7776 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
7777 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7778 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7779 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
7780 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
7781 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7782 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7783 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
7784 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
7785 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
7787 /* SPE loads and stores */
7788 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7790 target_ulong uimm = rB(ctx->opcode);
7792 if (rA(ctx->opcode) == 0) {
7793 tcg_gen_movi_tl(EA, uimm << sh);
7795 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7796 if (NARROW_MODE(ctx)) {
7797 tcg_gen_ext32u_tl(EA, EA);
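/* SPE d-form addressing: the rB field is reused as a 5-bit unsigned offset
 * scaled by the access size, so with sh = 3 (evldd below) the reachable
 * offsets are 0, 8, ..., 248.  rA == 0 means the scaled offset is used as an
 * absolute address, and in narrow mode the EA is truncated to 32 bits. */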
7802 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
7804 #if defined(TARGET_PPC64)
7805 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7807 TCGv_i64 t0 = tcg_temp_new_i64();
7808 gen_qemu_ld64(ctx, t0, addr);
7809 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
7810 tcg_gen_shri_i64(t0, t0, 32);
7811 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
7812 tcg_temp_free_i64(t0);
7816 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
7818 #if defined(TARGET_PPC64)
7819 TCGv t0 = tcg_temp_new();
7820 gen_qemu_ld32u(ctx, t0, addr);
7821 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7822 gen_addr_add(ctx, addr, addr, 4);
7823 gen_qemu_ld32u(ctx, t0, addr);
7824 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7827 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7828 gen_addr_add(ctx, addr, addr, 4);
7829 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7833 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
7835 TCGv t0 = tcg_temp_new();
7836 #if defined(TARGET_PPC64)
7837 gen_qemu_ld16u(ctx, t0, addr);
7838 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7839 gen_addr_add(ctx, addr, addr, 2);
7840 gen_qemu_ld16u(ctx, t0, addr);
7841 tcg_gen_shli_tl(t0, t0, 32);
7842 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7843 gen_addr_add(ctx, addr, addr, 2);
7844 gen_qemu_ld16u(ctx, t0, addr);
7845 tcg_gen_shli_tl(t0, t0, 16);
7846 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7847 gen_addr_add(ctx, addr, addr, 2);
7848 gen_qemu_ld16u(ctx, t0, addr);
7849 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7851 gen_qemu_ld16u(ctx, t0, addr);
7852 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7853 gen_addr_add(ctx, addr, addr, 2);
7854 gen_qemu_ld16u(ctx, t0, addr);
7855 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7856 gen_addr_add(ctx, addr, addr, 2);
7857 gen_qemu_ld16u(ctx, t0, addr);
7858     tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7859 gen_addr_add(ctx, addr, addr, 2);
7860 gen_qemu_ld16u(ctx, t0, addr);
7861 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7866 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
7868 TCGv t0 = tcg_temp_new();
7869 gen_qemu_ld16u(ctx, t0, addr);
7870 #if defined(TARGET_PPC64)
7871 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7872 tcg_gen_shli_tl(t0, t0, 16);
7873 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7875 tcg_gen_shli_tl(t0, t0, 16);
7876 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7877 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7882 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
7884 TCGv t0 = tcg_temp_new();
7885 gen_qemu_ld16u(ctx, t0, addr);
7886 #if defined(TARGET_PPC64)
7887 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7888 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7890 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7891 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7896 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
7898 TCGv t0 = tcg_temp_new();
7899 gen_qemu_ld16s(ctx, t0, addr);
7900 #if defined(TARGET_PPC64)
7901 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7902 tcg_gen_ext32u_tl(t0, t0);
7903 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7905 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7906 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7911 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
7913 TCGv t0 = tcg_temp_new();
7914 #if defined(TARGET_PPC64)
7915 gen_qemu_ld16u(ctx, t0, addr);
7916 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7917 gen_addr_add(ctx, addr, addr, 2);
7918 gen_qemu_ld16u(ctx, t0, addr);
7919 tcg_gen_shli_tl(t0, t0, 16);
7920 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7922 gen_qemu_ld16u(ctx, t0, addr);
7923 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7924 gen_addr_add(ctx, addr, addr, 2);
7925 gen_qemu_ld16u(ctx, t0, addr);
7926 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7931 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
7933 #if defined(TARGET_PPC64)
7934 TCGv t0 = tcg_temp_new();
7935 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7936 gen_addr_add(ctx, addr, addr, 2);
7937 gen_qemu_ld16u(ctx, t0, addr);
7938 tcg_gen_shli_tl(t0, t0, 32);
7939 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7942 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7943 gen_addr_add(ctx, addr, addr, 2);
7944 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7948 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
7950 #if defined(TARGET_PPC64)
7951 TCGv t0 = tcg_temp_new();
7952 gen_qemu_ld16s(ctx, t0, addr);
7953 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
7954 gen_addr_add(ctx, addr, addr, 2);
7955 gen_qemu_ld16s(ctx, t0, addr);
7956 tcg_gen_shli_tl(t0, t0, 32);
7957 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7960 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7961 gen_addr_add(ctx, addr, addr, 2);
7962 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7966 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
7968 TCGv t0 = tcg_temp_new();
7969 gen_qemu_ld32u(ctx, t0, addr);
7970 #if defined(TARGET_PPC64)
7971 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7972 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7974 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7975 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7980 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
7982 TCGv t0 = tcg_temp_new();
7983 #if defined(TARGET_PPC64)
7984 gen_qemu_ld16u(ctx, t0, addr);
7985 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7986 tcg_gen_shli_tl(t0, t0, 32);
7987 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7988 gen_addr_add(ctx, addr, addr, 2);
7989 gen_qemu_ld16u(ctx, t0, addr);
7990 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7991 tcg_gen_shli_tl(t0, t0, 16);
7992 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7994 gen_qemu_ld16u(ctx, t0, addr);
7995 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7996 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7997 gen_addr_add(ctx, addr, addr, 2);
7998 gen_qemu_ld16u(ctx, t0, addr);
7999 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
8000     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8005 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
8007 #if defined(TARGET_PPC64)
8008 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8010 TCGv_i64 t0 = tcg_temp_new_i64();
8011 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
8012 gen_qemu_st64(ctx, t0, addr);
8013 tcg_temp_free_i64(t0);
8017 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
8019 #if defined(TARGET_PPC64)
8020 TCGv t0 = tcg_temp_new();
8021 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8022 gen_qemu_st32(ctx, t0, addr);
8025 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8027 gen_addr_add(ctx, addr, addr, 4);
8028 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8031 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
8033 TCGv t0 = tcg_temp_new();
8034 #if defined(TARGET_PPC64)
8035 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
8037 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
8039 gen_qemu_st16(ctx, t0, addr);
8040 gen_addr_add(ctx, addr, addr, 2);
8041 #if defined(TARGET_PPC64)
8042 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8043 gen_qemu_st16(ctx, t0, addr);
8045 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8047 gen_addr_add(ctx, addr, addr, 2);
8048 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
8049 gen_qemu_st16(ctx, t0, addr);
8051 gen_addr_add(ctx, addr, addr, 2);
8052 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8055 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
8057 TCGv t0 = tcg_temp_new();
8058 #if defined(TARGET_PPC64)
8059 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
8061 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
8063 gen_qemu_st16(ctx, t0, addr);
8064 gen_addr_add(ctx, addr, addr, 2);
8065 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
8066 gen_qemu_st16(ctx, t0, addr);
8070 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
8072 #if defined(TARGET_PPC64)
8073 TCGv t0 = tcg_temp_new();
8074 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8075 gen_qemu_st16(ctx, t0, addr);
8078 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8080 gen_addr_add(ctx, addr, addr, 2);
8081 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8084 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
8086 #if defined(TARGET_PPC64)
8087 TCGv t0 = tcg_temp_new();
8088 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8089 gen_qemu_st32(ctx, t0, addr);
8092 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8096 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
8098 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8101 #define GEN_SPEOP_LDST(name, opc2, sh) \
8102 static void glue(gen_, name)(DisasContext *ctx) \
8105 if (unlikely(!ctx->spe_enabled)) { \
8106 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8109 gen_set_access_type(ctx, ACCESS_INT); \
8110 t0 = tcg_temp_new(); \
8111 if (Rc(ctx->opcode)) { \
8112 gen_addr_spe_imm_index(ctx, t0, sh); \
8114 gen_addr_reg_index(ctx, t0); \
8116 gen_op_##name(ctx, t0); \
8117 tcg_temp_free(t0); \
8120 GEN_SPEOP_LDST(evldd, 0x00, 3);
8121 GEN_SPEOP_LDST(evldw, 0x01, 3);
8122 GEN_SPEOP_LDST(evldh, 0x02, 3);
8123 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
8124 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
8125 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
8126 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
8127 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
8128 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
8129 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
8130 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
8132 GEN_SPEOP_LDST(evstdd, 0x10, 3);
8133 GEN_SPEOP_LDST(evstdw, 0x11, 3);
8134 GEN_SPEOP_LDST(evstdh, 0x12, 3);
8135 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
8136 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
8137 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
8138 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
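/* GEN_SPEOP_LDST hooks one gen_op_<name> body up to both addressing modes:
 * the Rc bit selects the scaled-immediate form (gen_addr_spe_imm_index with
 * the given sh) over the register-indexed form (gen_addr_reg_index).  In the
 * table above, sh is log2 of the access size in bytes. */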
8140 /* Multiply and add - TODO */
8142 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
8143 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8144 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8145 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8146 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8147 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8148 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8149 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8150 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8151 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8152 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8153 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8155 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8156 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8157 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8158 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8159 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8160 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8161 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8162 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8163 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8164 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8165 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8166 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8168 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8169 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8170 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8171 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8172 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);
8174 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8175 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8176 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8177 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8178 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8179 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8180 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8181 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8182 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8183 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8184 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8185 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8187 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8188 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8189 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8190 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8192 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8193 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8194 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8195 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8196 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8197 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8198 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8199 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8200 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8201 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8202 GEN_SPE(evmhogumian, evmhogsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8203 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8205 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8206 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8207 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8208 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8209 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8212 /*** SPE floating-point extension ***/
8213 #if defined(TARGET_PPC64)
8214 #define GEN_SPEFPUOP_CONV_32_32(name) \
8215 static inline void gen_##name(DisasContext *ctx) \
8219 t0 = tcg_temp_new_i32(); \
8220 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
8221 gen_helper_##name(t0, cpu_env, t0); \
8222 t1 = tcg_temp_new(); \
8223 tcg_gen_extu_i32_tl(t1, t0); \
8224 tcg_temp_free_i32(t0); \
8225 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
8226 0xFFFFFFFF00000000ULL); \
8227 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
8228 tcg_temp_free(t1); \
8230 #define GEN_SPEFPUOP_CONV_32_64(name) \
8231 static inline void gen_##name(DisasContext *ctx) \
8235 t0 = tcg_temp_new_i32(); \
8236 gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
8237 t1 = tcg_temp_new(); \
8238 tcg_gen_extu_i32_tl(t1, t0); \
8239 tcg_temp_free_i32(t0); \
8240 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
8241 0xFFFFFFFF00000000ULL); \
8242 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
8243 tcg_temp_free(t1); \
8245 #define GEN_SPEFPUOP_CONV_64_32(name) \
8246 static inline void gen_##name(DisasContext *ctx) \
8248 TCGv_i32 t0 = tcg_temp_new_i32(); \
8249 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
8250 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
8251 tcg_temp_free_i32(t0); \
8253 #define GEN_SPEFPUOP_CONV_64_64(name) \
8254 static inline void gen_##name(DisasContext *ctx) \
8256 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8257 cpu_gpr[rB(ctx->opcode)]); \
8259 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
8260 static inline void gen_##name(DisasContext *ctx) \
8264 if (unlikely(!ctx->spe_enabled)) { \
8265 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8268 t0 = tcg_temp_new_i32(); \
8269 t1 = tcg_temp_new_i32(); \
8270 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
8271 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
8272 gen_helper_##name(t0, cpu_env, t0, t1); \
8273 tcg_temp_free_i32(t1); \
8274 t2 = tcg_temp_new(); \
8275 tcg_gen_extu_i32_tl(t2, t0); \
8276 tcg_temp_free_i32(t0); \
8277 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
8278 0xFFFFFFFF00000000ULL); \
8279 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
8280 tcg_temp_free(t2); \
8282 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8283 static inline void gen_##name(DisasContext *ctx) \
8285 if (unlikely(!ctx->spe_enabled)) { \
8286 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8289 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8290 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8292 #define GEN_SPEFPUOP_COMP_32(name) \
8293 static inline void gen_##name(DisasContext *ctx) \
8296 if (unlikely(!ctx->spe_enabled)) { \
8297 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8300 t0 = tcg_temp_new_i32(); \
8301 t1 = tcg_temp_new_i32(); \
8302 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
8303 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
8304 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
8305 tcg_temp_free_i32(t0); \
8306 tcg_temp_free_i32(t1); \
8308 #define GEN_SPEFPUOP_COMP_64(name) \
8309 static inline void gen_##name(DisasContext *ctx) \
8311 if (unlikely(!ctx->spe_enabled)) { \
8312 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8315 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
8316 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8319 #define GEN_SPEFPUOP_CONV_32_32(name) \
8320 static inline void gen_##name(DisasContext *ctx) \
8322 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8323 cpu_gpr[rB(ctx->opcode)]); \
8325 #define GEN_SPEFPUOP_CONV_32_64(name) \
8326 static inline void gen_##name(DisasContext *ctx) \
8328 TCGv_i64 t0 = tcg_temp_new_i64(); \
8329 gen_load_gpr64(t0, rB(ctx->opcode)); \
8330 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
8331 tcg_temp_free_i64(t0); \
8333 #define GEN_SPEFPUOP_CONV_64_32(name) \
8334 static inline void gen_##name(DisasContext *ctx) \
8336 TCGv_i64 t0 = tcg_temp_new_i64(); \
8337 gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
8338 gen_store_gpr64(rD(ctx->opcode), t0); \
8339 tcg_temp_free_i64(t0); \
8341 #define GEN_SPEFPUOP_CONV_64_64(name) \
8342 static inline void gen_##name(DisasContext *ctx) \
8344 TCGv_i64 t0 = tcg_temp_new_i64(); \
8345 gen_load_gpr64(t0, rB(ctx->opcode)); \
8346 gen_helper_##name(t0, cpu_env, t0); \
8347 gen_store_gpr64(rD(ctx->opcode), t0); \
8348 tcg_temp_free_i64(t0); \
8350 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
8351 static inline void gen_##name(DisasContext *ctx) \
8353 if (unlikely(!ctx->spe_enabled)) { \
8354 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8357 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8358 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8360 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8361 static inline void gen_##name(DisasContext *ctx) \
8364 if (unlikely(!ctx->spe_enabled)) { \
8365 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8368 t0 = tcg_temp_new_i64(); \
8369 t1 = tcg_temp_new_i64(); \
8370 gen_load_gpr64(t0, rA(ctx->opcode)); \
8371 gen_load_gpr64(t1, rB(ctx->opcode)); \
8372 gen_helper_##name(t0, cpu_env, t0, t1); \
8373 gen_store_gpr64(rD(ctx->opcode), t0); \
8374 tcg_temp_free_i64(t0); \
8375 tcg_temp_free_i64(t1); \
8377 #define GEN_SPEFPUOP_COMP_32(name) \
8378 static inline void gen_##name(DisasContext *ctx) \
8380 if (unlikely(!ctx->spe_enabled)) { \
8381 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8384 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
8385 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8387 #define GEN_SPEFPUOP_COMP_64(name) \
8388 static inline void gen_##name(DisasContext *ctx) \
8391 if (unlikely(!ctx->spe_enabled)) { \
8392 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8395 t0 = tcg_temp_new_i64(); \
8396 t1 = tcg_temp_new_i64(); \
8397 gen_load_gpr64(t0, rA(ctx->opcode)); \
8398 gen_load_gpr64(t1, rB(ctx->opcode)); \
8399 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
8400 tcg_temp_free_i64(t0); \
8401 tcg_temp_free_i64(t1); \
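/* Naming convention for the SPE FP glue above: GEN_SPEFPUOP_CONV_<d>_<s>
 * converts an <s>-bit source into a <d>-bit result, ARITH2 is a two-operand
 * arithmetic op and COMP writes a CR field.  On TARGET_PPC64 the 32-bit
 * forms merge the result into the low word of the 64-bit GPR; on 32-bit
 * targets the 64-bit forms go through gen_load_gpr64/gen_store_gpr64. */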
8405 /* Single precision floating-point vector operations */
8407 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
8408 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
8409 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
8410 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
8411 static inline void gen_evfsabs(DisasContext *ctx)
8413 if (unlikely(!ctx->spe_enabled)) {
8414 gen_exception(ctx, POWERPC_EXCP_SPEU);
8417 #if defined(TARGET_PPC64)
8418 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
8420 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
8421 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8424 static inline void gen_evfsnabs(DisasContext *ctx)
8426 if (unlikely(!ctx->spe_enabled)) {
8427 gen_exception(ctx, POWERPC_EXCP_SPEU);
8430 #if defined(TARGET_PPC64)
8431 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8433 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8434 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8437 static inline void gen_evfsneg(DisasContext *ctx)
8439 if (unlikely(!ctx->spe_enabled)) {
8440 gen_exception(ctx, POWERPC_EXCP_SPEU);
8443 #if defined(TARGET_PPC64)
8444 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8446 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8447 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8452 GEN_SPEFPUOP_CONV_64_64(evfscfui);
8453 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8454 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8455 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8456 GEN_SPEFPUOP_CONV_64_64(evfsctui);
8457 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8458 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8459 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8460 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8461 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8464 GEN_SPEFPUOP_COMP_64(evfscmpgt);
8465 GEN_SPEFPUOP_COMP_64(evfscmplt);
8466 GEN_SPEFPUOP_COMP_64(evfscmpeq);
8467 GEN_SPEFPUOP_COMP_64(evfststgt);
8468 GEN_SPEFPUOP_COMP_64(evfststlt);
8469 GEN_SPEFPUOP_COMP_64(evfststeq);
8471 /* Opcodes definitions */
8472 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8473 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8474 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8475 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8476 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8477 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8478 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8479 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8480 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8481 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8482 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8483 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8484 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8485 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8487 /* Single precision floating-point operations */
8489 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
8490 GEN_SPEFPUOP_ARITH2_32_32(efssub);
8491 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
8492 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
8493 static inline void gen_efsabs(DisasContext *ctx)
8495 if (unlikely(!ctx->spe_enabled)) {
8496 gen_exception(ctx, POWERPC_EXCP_SPEU);
8499 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
8501 static inline void gen_efsnabs(DisasContext *ctx)
8503 if (unlikely(!ctx->spe_enabled)) {
8504 gen_exception(ctx, POWERPC_EXCP_SPEU);
8507 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8509 static inline void gen_efsneg(DisasContext *ctx)
8511 if (unlikely(!ctx->spe_enabled)) {
8512 gen_exception(ctx, POWERPC_EXCP_SPEU);
8515 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8519 GEN_SPEFPUOP_CONV_32_32(efscfui);
8520 GEN_SPEFPUOP_CONV_32_32(efscfsi);
8521 GEN_SPEFPUOP_CONV_32_32(efscfuf);
8522 GEN_SPEFPUOP_CONV_32_32(efscfsf);
8523 GEN_SPEFPUOP_CONV_32_32(efsctui);
8524 GEN_SPEFPUOP_CONV_32_32(efsctsi);
8525 GEN_SPEFPUOP_CONV_32_32(efsctuf);
8526 GEN_SPEFPUOP_CONV_32_32(efsctsf);
8527 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8528 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8529 GEN_SPEFPUOP_CONV_32_64(efscfd);
8532 GEN_SPEFPUOP_COMP_32(efscmpgt);
8533 GEN_SPEFPUOP_COMP_32(efscmplt);
8534 GEN_SPEFPUOP_COMP_32(efscmpeq);
8535 GEN_SPEFPUOP_COMP_32(efststgt);
8536 GEN_SPEFPUOP_COMP_32(efststlt);
8537 GEN_SPEFPUOP_COMP_32(efststeq);
8539 /* Opcodes definitions */
8540 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8541 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8542 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8543 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8544 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8545 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
8546 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8547 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8548 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8549 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8550 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8551 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8552 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8553 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8555 /* Double precision floating-point operations */
8557 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
8558 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
8559 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
8560 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
static inline void gen_efdabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
#endif
}
static inline void gen_efdnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
static inline void gen_efdneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
8602 GEN_SPEFPUOP_CONV_64_32(efdcfui);
8603 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8604 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8605 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8606 GEN_SPEFPUOP_CONV_32_64(efdctui);
8607 GEN_SPEFPUOP_CONV_32_64(efdctsi);
8608 GEN_SPEFPUOP_CONV_32_64(efdctuf);
8609 GEN_SPEFPUOP_CONV_32_64(efdctsf);
8610 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8611 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8612 GEN_SPEFPUOP_CONV_64_32(efdcfs);
8613 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8614 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8615 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8616 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8619 GEN_SPEFPUOP_COMP_64(efdcmpgt);
8620 GEN_SPEFPUOP_COMP_64(efdcmplt);
8621 GEN_SPEFPUOP_COMP_64(efdcmpeq);
8622 GEN_SPEFPUOP_COMP_64(efdtstgt);
8623 GEN_SPEFPUOP_COMP_64(efdtstlt);
8624 GEN_SPEFPUOP_COMP_64(efdtsteq);
8626 /* Opcodes definitions */
8627 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8628 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8629 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
8630 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8631 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8632 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8633 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8634 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
8635 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8636 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8637 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8638 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8639 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8640 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8641 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8642 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
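/*
 * Reader's note (not from the original sources): each GEN_HANDLER()/
 * GEN_HANDLER2()/GEN_HANDLER_E() line in the table below registers one
 * opcodes[] entry.  The arguments are the handler name (GEN_HANDLER2 adds a
 * separate mnemonic string), the primary opcode opc1, the two halves of the
 * extended opcode opc2/opc3 (0xFF meaning "unused"), a mask of instruction
 * bits that must be zero for the encoding to be valid, and the insns_flags
 * feature bit a CPU must advertise for the instruction to be installed.
 * Very roughly, one entry ends up looking like:
 *
 *     { .opc1 = 0x1F, .opc2 = 0x0A, .opc3 = 0x08,
 *       .handler = { .inval1 = 0x00000000, .type = PPC_INTEGER,
 *                    .handler = &gen_add },
 *       .oname = "add" }
 *
 * The authoritative layout is the opcode_t/GEN_OPCODE definitions earlier in
 * this file; the sketch only shows which argument lands in which field.
 */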
8644 static opcode_t opcodes[] = {
8645 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8646 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8647 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8648 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8649 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8650 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
8651 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8652 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8653 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8654 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8655 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8656 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8657 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8658 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8659 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8660 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8661 #if defined(TARGET_PPC64)
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
#endif
8664 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8665 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8666 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8667 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8668 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8669 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8670 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8671 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8672 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8673 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8674 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8675 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8676 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8677 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8678 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
8679 #if defined(TARGET_PPC64)
8680 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8681 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
#endif
8684 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8685 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8686 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8687 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8688 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8689 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8690 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8691 #if defined(TARGET_PPC64)
8692 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8693 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8694 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8695 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
#endif
8698 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8699 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8700 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8701 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8702 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8703 GEN_HANDLER(fabs, 0x3F, 0x08, 0x08, 0x001F0000, PPC_FLOAT),
8704 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8705 GEN_HANDLER(fnabs, 0x3F, 0x08, 0x04, 0x001F0000, PPC_FLOAT),
8706 GEN_HANDLER(fneg, 0x3F, 0x08, 0x01, 0x001F0000, PPC_FLOAT),
8707 GEN_HANDLER_E(fcpsgn, 0x3F, 0x08, 0x00, 0x00000000, PPC_NONE, PPC2_ISA205),
8708 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8709 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8710 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8711 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8712 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00000000, PPC_FLOAT),
8713 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006e0800, PPC_FLOAT),
8714 #if defined(TARGET_PPC64)
8715 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8716 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
8719 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8720 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8721 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8722 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8723 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8724 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8725 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8726 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8727 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8728 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8729 #if defined(TARGET_PPC64)
8730 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
#endif
8733 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8734 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8735 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8736 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8737 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8738 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8739 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8740 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8741 #if defined(TARGET_PPC64)
8742 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
#endif
8745 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8746 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8747 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8748 #if defined(TARGET_PPC64)
8749 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
8752 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8753 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8754 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8755 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8756 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8757 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8758 #if defined(TARGET_PPC64)
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
#endif
8761 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8762 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8763 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8764 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8765 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8766 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8767 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8768 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
8769 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8770 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8771 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8772 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8773 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8774 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8775 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8776 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8777 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8778 #if defined(TARGET_PPC64)
8779 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
#endif
8789 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8790 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8791 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8792 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8793 #if defined(TARGET_PPC64)
8794 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
#endif
8797 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8798 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8799 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8800 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8801 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8802 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8803 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8804 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8805 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8806 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8807 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8808 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8809 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8810 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8811 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8812 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8813 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8814 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8815 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8816 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8817 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8818 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8819 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8820 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8821 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8822 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8823 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8824 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8825 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8826 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8827 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8828 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8829 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8830 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8831 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8832 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8833 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8834 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8835 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8836 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8837 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8838 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8839 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8840 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8841 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8842 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8843 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8844 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8845 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8846 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8847 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8848 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8849 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8850 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8851 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8852 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8853 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8854 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8855 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8856 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8857 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8858 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8859 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8860 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8861 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8862 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8863 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8864 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8865 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8866 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8867 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8868 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8869 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8870 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8871 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8872 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8873 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8874 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8875 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8876 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8877 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8878 PPC_NONE, PPC2_BOOKE206),
8879 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8880 PPC_NONE, PPC2_BOOKE206),
8881 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8882 PPC_NONE, PPC2_BOOKE206),
8883 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8884 PPC_NONE, PPC2_BOOKE206),
8885 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8886 PPC_NONE, PPC2_BOOKE206),
8887 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8888 PPC_NONE, PPC2_PRCNTL),
8889 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8890 PPC_NONE, PPC2_PRCNTL),
8891 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8892 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8893 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8894 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8895 PPC_BOOKE, PPC2_BOOKE206),
8896 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
8897 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8898 PPC_BOOKE, PPC2_BOOKE206),
8899 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8900 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8901 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8902 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8903 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8904 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8905 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8906 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8907 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8908 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
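/*
 * Reader's note: from here on, the helper macros that earlier in this file
 * expanded to translation functions (GEN_INT_ARITH_ADD, GEN_LOGICAL2,
 * GEN_FLOAT_AB, GEN_VXFORM, GEN_SPE, ...) are re-#defined so that the very
 * same invocation lists now expand to opcodes[] table entries.  For example,
 * with the redefinition just below,
 *
 *     GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
 *
 * becomes
 *
 *     GEN_HANDLER(add, 0x1F, 0x0A, 0x08, 0x00000000, PPC_INTEGER),
 *
 * so each instruction's encoding is written down only once.
 */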
8910 #undef GEN_INT_ARITH_ADD
8911 #undef GEN_INT_ARITH_ADD_CONST
8912 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8913 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8914 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8915 add_ca, compute_ca, compute_ov) \
8916 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8917 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8918 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8919 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8920 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8921 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8922 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8923 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8924 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8925 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8926 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8928 #undef GEN_INT_ARITH_DIVW
8929 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8930 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8931 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8932 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8933 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8934 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8936 #if defined(TARGET_PPC64)
8937 #undef GEN_INT_ARITH_DIVD
8938 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8939 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8940 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8941 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8942 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8943 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8945 #undef GEN_INT_ARITH_MUL_HELPER
8946 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8947 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8948 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8949 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif
8953 #undef GEN_INT_ARITH_SUBF
8954 #undef GEN_INT_ARITH_SUBF_CONST
8955 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8956 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8957 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8958 add_ca, compute_ca, compute_ov) \
8959 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8960 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8961 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8962 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8963 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8964 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8965 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8966 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8967 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8968 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8969 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8973 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8974 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8975 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8976 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8977 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8978 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8979 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8980 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8981 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8982 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8983 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8984 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8985 #if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif
#if defined(TARGET_PPC64)
#undef GEN_PPC64_R2
#undef GEN_PPC64_R4
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
             PPC_64B)
9004 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
9005 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
9006 GEN_PPC64_R4(rldic, 0x1E, 0x04),
9007 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
9008 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
#endif
#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS
9020 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
9021 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
9022 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
9023 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
9024 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
9025 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
9026 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
9027 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
9028 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
9029 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
9030 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
9031 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
9032 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
9033 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
9034 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
9035 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
9036 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
9037 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
9038 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
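/*
 * Reader's note: every GEN_FLOAT_AB/GEN_FLOAT_AC/GEN_FLOAT_ACB invocation
 * below registers the instruction twice, under primary opcode 0x3F for the
 * double-precision form and under 0x3B for the single-precision "s" form.
 * With the macros above,
 *
 *     GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
 *
 * expands to roughly
 *
 *     GEN_HANDLER(fadd,  0x3F, 0x15, 0xFF, 0x000007C0, PPC_FLOAT),
 *     GEN_HANDLER(fadds, 0x3B, 0x15, 0xFF, 0x000007C0, PPC_FLOAT),
 */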
9040 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
9041 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
9042 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
9043 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
9044 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
9045 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
9046 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
9047 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
9048 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
9049 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
9050 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
9051 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
9052 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
9053 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
9054 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
9055 #if defined(TARGET_PPC64)
9056 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
9057 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
#endif
9060 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
9061 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
9062 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
9063 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
9070 #define GEN_LD(name, ldop, opc, type) \
9071 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9072 #define GEN_LDU(name, ldop, opc, type) \
9073 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9074 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
9075 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
9076 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
9077 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
9078 #define GEN_LDS(name, ldop, op, type) \
9079 GEN_LD(name, ldop, op | 0x20, type) \
9080 GEN_LDU(name, ldop, op | 0x21, type) \
9081 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
9082 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
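/*
 * Reader's note: GEN_LDS() fans one mnemonic out into its four addressing
 * forms.  Taking the first invocation below,
 *
 *     GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
 *
 * produces (via the macros above) table entries for
 *
 *     lbz   - D-form,              opc1 = 0x02 | 0x20 = 0x22
 *     lbzu  - D-form with update,  opc1 = 0x23
 *     lbzux - X-form with update,  opc1 = 0x1F, opc2 = 0x17, opc3 = 0x03
 *     lbzx  - X-form indexed,      opc1 = 0x1F, opc2 = 0x17, opc3 = 0x02
 *
 * The GEN_STS() store macro further down follows the same pattern.
 */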
9084 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
9085 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
9086 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
9087 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
9088 #if defined(TARGET_PPC64)
9089 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
9090 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
9091 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
9092 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX)
#endif
9095 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
9096 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
9103 #define GEN_ST(name, stop, opc, type) \
9104 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9105 #define GEN_STU(name, stop, opc, type) \
9106 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
9107 #define GEN_STUX(name, stop, opc2, opc3, type) \
9108 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
9109 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
9110 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
9111 #define GEN_STS(name, stop, op, type) \
9112 GEN_ST(name, stop, op | 0x20, type) \
9113 GEN_STU(name, stop, op | 0x21, type) \
9114 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
9115 GEN_STX(name, stop, 0x17, op | 0x00, type)
9117 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
9118 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
9119 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
9120 #if defined(TARGET_PPC64)
9121 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
9122 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX)
#endif
9125 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
9126 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
9133 #define GEN_LDF(name, ldop, opc, type) \
9134 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9135 #define GEN_LDUF(name, ldop, opc, type) \
9136 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9137 #define GEN_LDUXF(name, ldop, opc, type) \
9138 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
9139 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
9140 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
9141 #define GEN_LDFS(name, ldop, op, type) \
9142 GEN_LDF(name, ldop, op | 0x20, type) \
9143 GEN_LDUF(name, ldop, op | 0x21, type) \
9144 GEN_LDUXF(name, ldop, op | 0x01, type) \
9145 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
9147 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
9148 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
9149 GEN_HANDLER_E(lfiwax, 0x1f, 0x17, 0x1a, 0x00000001, PPC_NONE, PPC2_ISA205),
9150 GEN_HANDLER_E(lfdp, 0x39, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
9151 GEN_HANDLER_E(lfdpx, 0x1F, 0x17, 0x18, 0x00200001, PPC_NONE, PPC2_ISA205),
9158 #define GEN_STF(name, stop, opc, type) \
9159 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9160 #define GEN_STUF(name, stop, opc, type) \
9161 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9162 #define GEN_STUXF(name, stop, opc, type) \
9163 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
9164 #define GEN_STXF(name, stop, opc2, opc3, type) \
9165 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
9166 #define GEN_STFS(name, stop, op, type) \
9167 GEN_STF(name, stop, op | 0x20, type) \
9168 GEN_STUF(name, stop, op | 0x21, type) \
9169 GEN_STUXF(name, stop, op | 0x01, type) \
9170 GEN_STXF(name, stop, 0x17, op | 0x00, type)
9172 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
9173 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
9174 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
9175 GEN_HANDLER_E(stfdp, 0x3D, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
9176 GEN_HANDLER_E(stfdpx, 0x1F, 0x17, 0x1C, 0x00200001, PPC_NONE, PPC2_ISA205),
9179 #define GEN_CRLOGIC(name, tcg_op, opc) \
9180 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
9181 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
9182 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
9183 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
9184 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
9185 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
9186 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
9187 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
9188 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
9190 #undef GEN_MAC_HANDLER
9191 #define GEN_MAC_HANDLER(name, opc2, opc3) \
9192 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
9193 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
9194 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
9195 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
9196 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
9197 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
9198 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
9199 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
9200 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
9201 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
9202 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
9203 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
9204 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
9205 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
9206 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
9207 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
9208 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
9209 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
9210 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
9211 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
9212 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
9213 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
9214 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
9215 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
9216 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
9217 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
9218 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
9219 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
9220 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
9221 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
9222 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
9223 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
9224 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
9225 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
9226 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
9227 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
9228 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
9229 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
9230 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
9231 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
9232 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
9233 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
9234 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
9240 #define GEN_VR_LDX(name, opc2, opc3) \
9241 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9242 #define GEN_VR_STX(name, opc2, opc3) \
9243 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9244 #define GEN_VR_LVE(name, opc2, opc3) \
9245 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9246 #define GEN_VR_STVE(name, opc2, opc3) \
9247 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9248 GEN_VR_LDX(lvx, 0x07, 0x03),
9249 GEN_VR_LDX(lvxl, 0x07, 0x0B),
9250 GEN_VR_LVE(bx, 0x07, 0x00),
9251 GEN_VR_LVE(hx, 0x07, 0x01),
9252 GEN_VR_LVE(wx, 0x07, 0x02),
9253 GEN_VR_STX(svx, 0x07, 0x07),
9254 GEN_VR_STX(svxl, 0x07, 0x0F),
9255 GEN_VR_STVE(bx, 0x07, 0x04),
9256 GEN_VR_STVE(hx, 0x07, 0x05),
9257 GEN_VR_STVE(wx, 0x07, 0x06),
9259 #undef GEN_VX_LOGICAL
9260 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
9261 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9262 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
9263 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
9264 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
9265 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
9266 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
9269 #define GEN_VXFORM(name, opc2, opc3) \
9270 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9271 GEN_VXFORM(vaddubm, 0, 0),
9272 GEN_VXFORM(vadduhm, 0, 1),
9273 GEN_VXFORM(vadduwm, 0, 2),
9274 GEN_VXFORM(vsububm, 0, 16),
9275 GEN_VXFORM(vsubuhm, 0, 17),
9276 GEN_VXFORM(vsubuwm, 0, 18),
9277 GEN_VXFORM(vmaxub, 1, 0),
9278 GEN_VXFORM(vmaxuh, 1, 1),
9279 GEN_VXFORM(vmaxuw, 1, 2),
9280 GEN_VXFORM(vmaxsb, 1, 4),
9281 GEN_VXFORM(vmaxsh, 1, 5),
9282 GEN_VXFORM(vmaxsw, 1, 6),
9283 GEN_VXFORM(vminub, 1, 8),
9284 GEN_VXFORM(vminuh, 1, 9),
9285 GEN_VXFORM(vminuw, 1, 10),
9286 GEN_VXFORM(vminsb, 1, 12),
9287 GEN_VXFORM(vminsh, 1, 13),
9288 GEN_VXFORM(vminsw, 1, 14),
9289 GEN_VXFORM(vavgub, 1, 16),
9290 GEN_VXFORM(vavguh, 1, 17),
9291 GEN_VXFORM(vavguw, 1, 18),
9292 GEN_VXFORM(vavgsb, 1, 20),
9293 GEN_VXFORM(vavgsh, 1, 21),
9294 GEN_VXFORM(vavgsw, 1, 22),
9295 GEN_VXFORM(vmrghb, 6, 0),
9296 GEN_VXFORM(vmrghh, 6, 1),
9297 GEN_VXFORM(vmrghw, 6, 2),
9298 GEN_VXFORM(vmrglb, 6, 4),
9299 GEN_VXFORM(vmrglh, 6, 5),
9300 GEN_VXFORM(vmrglw, 6, 6),
9301 GEN_VXFORM(vmuloub, 4, 0),
9302 GEN_VXFORM(vmulouh, 4, 1),
9303 GEN_VXFORM(vmulosb, 4, 4),
9304 GEN_VXFORM(vmulosh, 4, 5),
9305 GEN_VXFORM(vmuleub, 4, 8),
9306 GEN_VXFORM(vmuleuh, 4, 9),
9307 GEN_VXFORM(vmulesb, 4, 12),
9308 GEN_VXFORM(vmulesh, 4, 13),
9309 GEN_VXFORM(vslb, 2, 4),
9310 GEN_VXFORM(vslh, 2, 5),
9311 GEN_VXFORM(vslw, 2, 6),
9312 GEN_VXFORM(vsrb, 2, 8),
9313 GEN_VXFORM(vsrh, 2, 9),
9314 GEN_VXFORM(vsrw, 2, 10),
9315 GEN_VXFORM(vsrab, 2, 12),
9316 GEN_VXFORM(vsrah, 2, 13),
9317 GEN_VXFORM(vsraw, 2, 14),
9318 GEN_VXFORM(vslo, 6, 16),
9319 GEN_VXFORM(vsro, 6, 17),
9320 GEN_VXFORM(vaddcuw, 0, 6),
9321 GEN_VXFORM(vsubcuw, 0, 22),
9322 GEN_VXFORM(vaddubs, 0, 8),
9323 GEN_VXFORM(vadduhs, 0, 9),
9324 GEN_VXFORM(vadduws, 0, 10),
9325 GEN_VXFORM(vaddsbs, 0, 12),
9326 GEN_VXFORM(vaddshs, 0, 13),
9327 GEN_VXFORM(vaddsws, 0, 14),
9328 GEN_VXFORM(vsububs, 0, 24),
9329 GEN_VXFORM(vsubuhs, 0, 25),
9330 GEN_VXFORM(vsubuws, 0, 26),
9331 GEN_VXFORM(vsubsbs, 0, 28),
9332 GEN_VXFORM(vsubshs, 0, 29),
9333 GEN_VXFORM(vsubsws, 0, 30),
9334 GEN_VXFORM(vrlb, 2, 0),
9335 GEN_VXFORM(vrlh, 2, 1),
9336 GEN_VXFORM(vrlw, 2, 2),
9337 GEN_VXFORM(vsl, 2, 7),
9338 GEN_VXFORM(vsr, 2, 11),
9339 GEN_VXFORM(vpkuhum, 7, 0),
9340 GEN_VXFORM(vpkuwum, 7, 1),
9341 GEN_VXFORM(vpkuhus, 7, 2),
9342 GEN_VXFORM(vpkuwus, 7, 3),
9343 GEN_VXFORM(vpkshus, 7, 4),
9344 GEN_VXFORM(vpkswus, 7, 5),
9345 GEN_VXFORM(vpkshss, 7, 6),
9346 GEN_VXFORM(vpkswss, 7, 7),
9347 GEN_VXFORM(vpkpx, 7, 12),
9348 GEN_VXFORM(vsum4ubs, 4, 24),
9349 GEN_VXFORM(vsum4sbs, 4, 28),
9350 GEN_VXFORM(vsum4shs, 4, 25),
9351 GEN_VXFORM(vsum2sws, 4, 26),
9352 GEN_VXFORM(vsumsws, 4, 30),
9353 GEN_VXFORM(vaddfp, 5, 0),
9354 GEN_VXFORM(vsubfp, 5, 1),
9355 GEN_VXFORM(vmaxfp, 5, 16),
9356 GEN_VXFORM(vminfp, 5, 17),
9360 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
9361 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
9362 #define GEN_VXRFORM(name, opc2, opc3) \
9363 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
9364 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
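/*
 * Reader's note: each GEN_VXRFORM() below registers an AltiVec compare
 * twice, once as the plain form and once as the record form with a '.'
 * suffix whose opc3 differs only in bit 4 (the Rc bit of the encoding,
 * which also makes the instruction update CR field 6).  For instance
 * GEN_VXRFORM(vcmpequb, 3, 0) yields entries for "vcmpequb" (opc3 0x00)
 * and "vcmpequb." (opc3 0x10).
 */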
9365 GEN_VXRFORM(vcmpequb, 3, 0)
9366 GEN_VXRFORM(vcmpequh, 3, 1)
9367 GEN_VXRFORM(vcmpequw, 3, 2)
9368 GEN_VXRFORM(vcmpgtsb, 3, 12)
9369 GEN_VXRFORM(vcmpgtsh, 3, 13)
9370 GEN_VXRFORM(vcmpgtsw, 3, 14)
9371 GEN_VXRFORM(vcmpgtub, 3, 8)
9372 GEN_VXRFORM(vcmpgtuh, 3, 9)
9373 GEN_VXRFORM(vcmpgtuw, 3, 10)
9374 GEN_VXRFORM(vcmpeqfp, 3, 3)
9375 GEN_VXRFORM(vcmpgefp, 3, 7)
9376 GEN_VXRFORM(vcmpgtfp, 3, 11)
9377 GEN_VXRFORM(vcmpbfp, 3, 15)
9379 #undef GEN_VXFORM_SIMM
9380 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
9381 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9382 GEN_VXFORM_SIMM(vspltisb, 6, 12),
9383 GEN_VXFORM_SIMM(vspltish, 6, 13),
9384 GEN_VXFORM_SIMM(vspltisw, 6, 14),
9386 #undef GEN_VXFORM_NOA
9387 #define GEN_VXFORM_NOA(name, opc2, opc3) \
9388 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9389 GEN_VXFORM_NOA(vupkhsb, 7, 8),
9390 GEN_VXFORM_NOA(vupkhsh, 7, 9),
9391 GEN_VXFORM_NOA(vupklsb, 7, 10),
9392 GEN_VXFORM_NOA(vupklsh, 7, 11),
9393 GEN_VXFORM_NOA(vupkhpx, 7, 13),
9394 GEN_VXFORM_NOA(vupklpx, 7, 15),
9395 GEN_VXFORM_NOA(vrefp, 5, 4),
9396 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9397 GEN_VXFORM_NOA(vexptefp, 5, 6),
9398 GEN_VXFORM_NOA(vlogefp, 5, 7),
9399 GEN_VXFORM_NOA(vrfim, 5, 8),
9400 GEN_VXFORM_NOA(vrfin, 5, 9),
9401 GEN_VXFORM_NOA(vrfip, 5, 10),
9402 GEN_VXFORM_NOA(vrfiz, 5, 11),
9404 #undef GEN_VXFORM_UIMM
9405 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
9406 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9407 GEN_VXFORM_UIMM(vspltb, 6, 8),
9408 GEN_VXFORM_UIMM(vsplth, 6, 9),
9409 GEN_VXFORM_UIMM(vspltw, 6, 10),
9410 GEN_VXFORM_UIMM(vcfux, 5, 12),
9411 GEN_VXFORM_UIMM(vcfsx, 5, 13),
9412 GEN_VXFORM_UIMM(vctuxs, 5, 14),
9413 GEN_VXFORM_UIMM(vctsxs, 5, 15),
9415 #undef GEN_VAFORM_PAIRED
9416 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9417 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9418 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9419 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9420 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9421 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9422 GEN_VAFORM_PAIRED(vsel, vperm, 21),
9423 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9426 #define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
9427 GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
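/*
 * Reader's note: SPE instructions come in pairs that share opc1 0x04, opc2
 * and opc3 and differ only in the instruction's lowest bit (the field the
 * Rc() extractor reads), which is why they are registered through
 * GEN_OPCODE_DUAL.  inval0 and inval1 are the reserved-bit masks for the
 * first and second member of the pair, and the translation loop further
 * down selects the matching mask based on Rc(ctx.opcode).  A member named
 * "speundef" has mask 0xFFFFFFFF, so that half of the encoding is always
 * rejected as invalid.
 */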
9428 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9429 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9430 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9431 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9432 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9433 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9434 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9435 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
9436 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
9437 GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9438 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9439 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9440 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9441 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9442 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9443 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
9444 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9445 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9446 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9447 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9448 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9449 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9450 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9451 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9452 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9453 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9454 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9455 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9456 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),
9458 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9459 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9460 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9461 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9462 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9463 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9464 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9465 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9466 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9467 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9468 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9469 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9470 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9471 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9473 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9474 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9475 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9476 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9477 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9478 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
9479 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9480 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9481 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9482 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9483 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9484 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9485 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9486 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9488 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9489 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9490 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
9491 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9492 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9493 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9494 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9495 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
9496 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9497 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9498 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9499 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9500 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9501 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9502 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9503 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9505 #undef GEN_SPEOP_LDST
9506 #define GEN_SPEOP_LDST(name, opc2, sh) \
9507 GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
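/*
 * Reader's note: the third GEN_SPEOP_LDST() argument, sh, appears to be the
 * log2 of the access size (2/4/8 bytes -> 1/2/3); the translation-side macro
 * of the same name earlier in this file uses it to scale the immediate
 * offset of the ev* load/store encodings.  It plays no part in the table
 * entry itself, which is why the expansion above simply ignores it.
 */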
9508 GEN_SPEOP_LDST(evldd, 0x00, 3),
9509 GEN_SPEOP_LDST(evldw, 0x01, 3),
9510 GEN_SPEOP_LDST(evldh, 0x02, 3),
9511 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9512 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9513 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9514 GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9515 GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9516 GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9517 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9518 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9520 GEN_SPEOP_LDST(evstdd, 0x10, 3),
9521 GEN_SPEOP_LDST(evstdw, 0x11, 3),
9522 GEN_SPEOP_LDST(evstdh, 0x12, 3),
9523 GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9524 GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9525 GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
};
9529 #include "helper_regs.h"
9530 #include "translate_init.c"
9532 /*****************************************************************************/
9533 /* Misc PowerPC helpers */
void cpu_dump_state (CPUPPCState *env, FILE *f, fprintf_function cpu_fprintf,
                     int flags)
{
#define RGPL  4
#define RFPL  4
    int i;
    cpu_synchronize_state(env);
    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
                env->nip, env->lr, env->ctr, cpu_read_xer(env));
9547 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9548 TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9549 env->hflags, env->mmu_idx);
#if !defined(NO_TIMER_DUMP)
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
9562 for (i = 0; i < 32; i++) {
9563 if ((i & (RGPL - 1)) == 0)
9564 cpu_fprintf(f, "GPR%02d", i);
9565 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9566 if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
9592 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
9593 #if !defined(CONFIG_USER_ONLY)
9594 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9595 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9596 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9597 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9599 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9600 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9601 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9602 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9604 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9605 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9606 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9607 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9609 if (env->excp_model == POWERPC_EXCP_BOOKE) {
9610 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9611 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9612 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9613 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9615 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9616 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9617 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9618 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9620 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9621 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9622 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9623 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9625 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9626 " EPR " TARGET_FMT_lx "\n",
9627 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9628 env->spr[SPR_BOOKE_EPR]);
9631 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9632 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9633 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }
#if defined(TARGET_PPC64)
    if (env->flags & POWERPC_FLAG_CFAR) {
        cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
    }
#endif
9648 switch (env->mmu_model) {
9649 case POWERPC_MMU_32B:
9650 case POWERPC_MMU_601:
9651 case POWERPC_MMU_SOFT_6xx:
9652 case POWERPC_MMU_SOFT_74xx:
9653 #if defined(TARGET_PPC64)
    case POWERPC_MMU_64B:
#endif
        cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
        break;
9658 case POWERPC_MMU_BOOKE206:
9659 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9660 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9661 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9662 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9664 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9665 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9666 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9667 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9669 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9670 " TLB1CFG " TARGET_FMT_lx "\n",
9671 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9672 env->spr[SPR_BOOKE_TLB1CFG]);
9683 void cpu_dump_statistics (CPUPPCState *env, FILE*f, fprintf_function cpu_fprintf,
9686 #if defined(DO_PPC_STATISTICS)
9687 opc_handler_t **t1, **t2, **t3, *handler;
9691 for (op1 = 0; op1 < 64; op1++) {
9693 if (is_indirect_opcode(handler)) {
9694 t2 = ind_table(handler);
9695 for (op2 = 0; op2 < 32; op2++) {
9697 if (is_indirect_opcode(handler)) {
9698 t3 = ind_table(handler);
9699 for (op3 = 0; op3 < 32; op3++) {
9701 if (handler->count == 0)
9703 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9704 "%016" PRIx64 " %" PRId64 "\n",
9705 op1, op2, op3, op1, (op3 << 5) | op2,
9707 handler->count, handler->count);
9710 if (handler->count == 0)
9712 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9713 "%016" PRIx64 " %" PRId64 "\n",
9714 op1, op2, op1, op2, handler->oname,
9715 handler->count, handler->count);
9719 if (handler->count == 0)
9721 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9723 op1, op1, handler->oname,
9724 handler->count, handler->count);
9730 /*****************************************************************************/
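/*
 * Reader's overview of the translator below: gen_intermediate_code_internal()
 * fills one TranslationBlock.  For each guest instruction it fetches the
 * opcode at ctx.nip (byte-swapping it when MSR[LE] is set), walks up to three
 * levels of opcode tables keyed on opc1/opc2/opc3, checks the handler's
 * reserved-bit mask, and finally calls the handler, which emits the TCG ops.
 * Translation stops at a page boundary, when single-stepping, or when the
 * op buffer / instruction budget is exhausted.
 *
 * The opcode fields come from the opc1()/opc2()/opc3() extractors defined
 * near the top of this file; roughly, opc1 is insn >> 26 and opc2/opc3 are
 * the low and high five bits of the extended-opcode field.  Worked example,
 * assuming the standard encoding of "add r3,r4,r5" (0x7C642A14):
 *
 *     opc1 = 0x1F              major opcode 31
 *     opc2 = 0x0A, opc3 = 0x08 extended opcode 266 (XO), OE = 0
 *
 * which selects the entry registered by GEN_INT_ARITH_ADD(add, 0x08, ...)
 * in the opcodes[] table above.
 */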
static inline void gen_intermediate_code_internal(CPUPPCState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    pc_start = tb->pc;
    gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
    ctx.nip = tb->pc;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    ctx.mem_idx = env->mmu_idx;
9751 ctx.insns_flags = env->insns_flags;
9752 ctx.insns_flags2 = env->insns_flags2;
9753 ctx.access_type = -1;
9754 ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9755 #if defined(TARGET_PPC64)
9756 ctx.sf_mode = msr_is_64bit(env, env->msr);
    ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
9759 ctx.fpu_enabled = msr_fp;
9760 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9761 ctx.spe_enabled = msr_spe;
9763 ctx.spe_enabled = 0;
9764 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9765 ctx.altivec_enabled = msr_vr;
9767 ctx.altivec_enabled = 0;
9768 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9769 ctx.singlestep_enabled = CPU_SINGLE_STEP;
9771 ctx.singlestep_enabled = 0;
9772 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9773 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9774 if (unlikely(env->singlestep_enabled))
9775 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9776 #if defined (DO_SINGLE_STEP) && 0
9777 /* Single step trace mode */
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
9786 /* Set env in case of segfault during code fetch */
9787 while (ctx.exception == POWERPC_EXCP_NONE
9788 && tcg_ctx.gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.nip) {
                    gen_debug_exception(ctxp);
                    break;
                }
            }
        }
        if (unlikely(search_pc)) {
            j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    tcg_ctx.gen_opc_instr_start[lj++] = 0;
            }
            tcg_ctx.gen_opc_pc[lj] = ctx.nip;
            tcg_ctx.gen_opc_instr_start[lj] = 1;
            tcg_ctx.gen_opc_icount[lj] = num_insns;
        }
9808 LOG_DISAS("----------------\n");
9809 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9810 ctx.nip, ctx.mem_idx, (int)msr_ir);
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
9813 if (unlikely(ctx.le_mode)) {
9814 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
9816 ctx.opcode = cpu_ldl_code(env, ctx.nip);
9818 LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9819 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9820 opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
9821 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
            tcg_gen_debug_insn_start(ctx.nip);
        }
        ctx.nip += 4;
        table = env->opcodes;
9827 handler = table[opc1(ctx.opcode)];
9828 if (is_indirect_opcode(handler)) {
9829 table = ind_table(handler);
9830 handler = table[opc2(ctx.opcode)];
9831 if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
            }
        }
9836 /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            if (qemu_log_enabled()) {
                qemu_log("invalid/unsupported opcode: "
                         "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
                         opc1(ctx.opcode), opc2(ctx.opcode),
                         opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            }
        } else {
            uint32_t inval;

            if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
                inval = handler->inval2;
            } else {
                inval = handler->inval1;
            }

            if (unlikely((ctx.opcode & inval) != 0)) {
                if (qemu_log_enabled()) {
                    qemu_log("invalid bits: %08x for opcode: "
                             "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
                             ctx.opcode & inval, opc1(ctx.opcode),
                             opc2(ctx.opcode), opc3(ctx.opcode),
                             ctx.opcode, ctx.nip - 4);
                }
                gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
                break;
            }
        }
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
9869 /* Check trace mode exceptions */
9870 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9871 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9872 ctx.exception != POWERPC_SYSCALL &&
9873 ctx.exception != POWERPC_EXCP_TRAP &&
9874 ctx.exception != POWERPC_EXCP_BRANCH)) {
9875 gen_exception(ctxp, POWERPC_EXCP_TRACE);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (env->singlestep_enabled) ||
                            singlestep ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation and turn to next code */
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
9888 if (ctx.exception == POWERPC_EXCP_NONE) {
9889 gen_goto_tb(&ctx, 0, ctx.nip);
9890 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(env->singlestep_enabled)) {
            gen_debug_exception(ctxp);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
9897 gen_tb_end(tb, num_insns);
9898 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
    if (unlikely(search_pc)) {
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
        lj++;
        while (lj <= j)
            tcg_ctx.gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.nip - pc_start;
        tb->icount = num_insns;
    }
9908 #if defined(DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(env, pc_start, ctx.nip - pc_start, flags);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code (CPUPPCState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
void gen_intermediate_code_pc (CPUPPCState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, int pc_pos)
{
    env->nip = tcg_ctx.gen_opc_pc[pc_pos];
}