2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
30 #include "qemu-common.h"
36 #define CPU_SINGLE_STEP 0x1
37 #define CPU_BRANCH_STEP 0x2
38 #define GDBSTUB_SINGLE_STEP 0x4
40 /* Include definitions for instructions classes and implementations flags */
41 //#define DO_SINGLE_STEP
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
44 //#define OPTIMIZE_FPRF_UPDATE
46 /*****************************************************************************/
47 /* Code translation helpers */
49 /* global register indexes */
/* TCG global variables mirroring architectural CPU state (CPUState
 * fields).  They are created once by ppc_translate_init() below. */
50 static TCGv_ptr cpu_env;
/* Backing storage for the register-name strings handed to TCG: names
 * "r0".."r9" need 3 bytes each ("rN" + NUL), "r10".."r31" need 4, and so
 * on for the other register classes — the size formula mirrors the name
 * lengths used in ppc_translate_init(). */
51 static char cpu_reg_names[10*3 + 22*4 /* GPR */
52 #if !defined(TARGET_PPC64)
53 + 10*4 + 22*5 /* SPE GPRh */
55 + 10*4 + 22*5 /* FPR */
56 + 2*(10*6 + 22*7) /* AVRh, AVRl */
58 static TCGv cpu_gpr[32];
59 #if !defined(TARGET_PPC64)
/* Upper GPR halves (SPE) only exist as separate globals on 32-bit targets. */
60 static TCGv cpu_gprh[32];
62 static TCGv_i64 cpu_fpr[32];
/* Altivec registers are handled as two 64-bit halves. */
63 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
64 static TCGv_i32 cpu_crf[8];
/* Address of an active lwarx/ldarx reservation. */
70 static TCGv cpu_reserve;
71 static TCGv_i32 cpu_fpscr;
72 static TCGv_i32 cpu_access_type;
74 /* dyngen register indexes */
77 #include "gen-icount.h"
/* Create every TCG global used by this front end.  Must run before any
 * translation; the done_init static presumably guards against repeated
 * calls (the test itself is not visible in this excerpt). */
79 void ppc_translate_init(void)
83 static int done_init = 0;
88 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
89 #if TARGET_LONG_BITS > HOST_LONG_BITS
/* Not enough host registers for target-sized values: keep the T0..T2
 * temporaries in env memory instead of host registers. */
90 cpu_T[0] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t0), "T0");
91 cpu_T[1] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t1), "T1");
92 cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
94 cpu_T[0] = tcg_global_reg_new(TCG_AREG1, "T0");
95 cpu_T[1] = tcg_global_reg_new(TCG_AREG2, "T1");
97 /* XXX: This is a temporary workaround for i386.
98 * On i386 qemu_st32 runs out of registers.
99 * The proper fix is to remove cpu_T.
101 cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
103 cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2");
/* Condition register fields crf0..crf7, 4 bits each. */
109 for (i = 0; i < 8; i++) {
110 sprintf(p, "crf%d", i);
111 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
112 offsetof(CPUState, crf[i]), p);
/* GPRs, SPE high halves, FPRs and Altivec halves.  'p' walks through
 * cpu_reg_names; the "(i < 10) ? n : n+1" increments step past each
 * NUL-terminated name just written by sprintf. */
116 for (i = 0; i < 32; i++) {
117 sprintf(p, "r%d", i);
118 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
119 offsetof(CPUState, gpr[i]), p);
120 p += (i < 10) ? 3 : 4;
121 #if !defined(TARGET_PPC64)
122 sprintf(p, "r%dH", i);
123 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
124 offsetof(CPUState, gprh[i]), p);
125 p += (i < 10) ? 4 : 5;
128 sprintf(p, "fp%d", i);
129 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
130 offsetof(CPUState, fpr[i]), p);
131 p += (i < 10) ? 4 : 5;
133 sprintf(p, "avr%dH", i);
/* The architectural high half of avr[i] is u64[0] on big-endian hosts
 * and u64[1] on little-endian ones; the #ifdef selects accordingly. */
134 #ifdef WORDS_BIGENDIAN
135 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
136 offsetof(CPUState, avr[i].u64[0]), p);
138 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
139 offsetof(CPUState, avr[i].u64[1]), p);
141 p += (i < 10) ? 6 : 7;
143 sprintf(p, "avr%dL", i);
144 #ifdef WORDS_BIGENDIAN
145 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
146 offsetof(CPUState, avr[i].u64[1]), p);
148 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
149 offsetof(CPUState, avr[i].u64[0]), p);
151 p += (i < 10) ? 6 : 7;
/* Remaining scalar architectural state. */
154 cpu_nip = tcg_global_mem_new(TCG_AREG0,
155 offsetof(CPUState, nip), "nip");
157 cpu_msr = tcg_global_mem_new(TCG_AREG0,
158 offsetof(CPUState, msr), "msr");
160 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
161 offsetof(CPUState, ctr), "ctr");
163 cpu_lr = tcg_global_mem_new(TCG_AREG0,
164 offsetof(CPUState, lr), "lr");
166 cpu_xer = tcg_global_mem_new(TCG_AREG0,
167 offsetof(CPUState, xer), "xer");
169 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
170 offsetof(CPUState, reserve), "reserve");
172 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
173 offsetof(CPUState, fpscr), "fpscr");
175 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
176 offsetof(CPUState, access_type), "access_type");
178 /* register helpers */
/* Scratch table recording where fprf-update ops were emitted, so
 * gen_optimize_fprf() can later turn them into nops.  Only compiled
 * with OPTIMIZE_FPRF_UPDATE. */
185 #if defined(OPTIMIZE_FPRF_UPDATE)
186 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
187 static uint16_t **gen_fprf_ptr;
190 /* internal defines */
/* Per-translation-block decoding state, passed to every gen_* handler. */
191 typedef struct DisasContext {
192 struct TranslationBlock *tb;
196 /* Routine used to access memory */
198 /* Translation flags */
199 #if !defined(CONFIG_USER_ONLY)
202 #if defined(TARGET_PPC64)
208 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
209 int singlestep_enabled;
/* Descriptor for one decoded opcode: invalid-bit mask, instruction-type
 * flags and the code generation callback. */
212 struct opc_handler_t {
215 /* instruction type */
218 void (*handler)(DisasContext *ctx);
219 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
222 #if defined(DO_PPC_STATISTICS)
/* Reset the softfloat exception status before an FP operation
 * (generates nothing unless CONFIG_SOFTFLOAT is enabled). */
227 static always_inline void gen_reset_fpstatus (void)
229 #ifdef CONFIG_SOFTFLOAT
230 gen_op_reset_fpstatus();
/* Update FPSCR[FPRF] for result 'arg' and/or copy the FP condition code
 * into CR1 for Rc=1 forms.
 *   set_fprf != 0: full FPRF computation (helper invoked with t0 = 1);
 *   set_rc only:   cheap path — helper invoked with t0 = 0, just enough
 *                  to produce fpcc for CR1. */
234 static always_inline void gen_compute_fprf (TCGv_i64 arg, int set_fprf, int set_rc)
236 TCGv_i32 t0 = tcg_temp_new_i32();
239 /* This case might be optimized later */
240 #if defined(OPTIMIZE_FPRF_UPDATE)
/* Remember where this update was emitted so gen_optimize_fprf() can
 * nop it out later. */
241 *gen_fprf_ptr++ = gen_opc_ptr;
243 tcg_gen_movi_i32(t0, 1);
244 gen_helper_compute_fprf(t0, arg, t0);
245 if (unlikely(set_rc)) {
246 tcg_gen_mov_i32(cpu_crf[1], t0);
248 gen_helper_float_check_status();
249 } else if (unlikely(set_rc)) {
250 /* We always need to compute fpcc */
251 tcg_gen_movi_i32(t0, 0);
252 gen_helper_compute_fprf(t0, arg, t0);
253 tcg_gen_mov_i32(cpu_crf[1], t0);
255 gen_helper_float_check_status();
258 tcg_temp_free_i32(t0);
/* Turn all recorded fprf-update ops into nops and reset the recording
 * pointer (OPTIMIZE_FPRF_UPDATE builds only). */
261 static always_inline void gen_optimize_fprf (void)
263 #if defined(OPTIMIZE_FPRF_UPDATE)
266 for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
267 *ptr = INDEX_op_nop1;
268 gen_fprf_ptr = gen_fprf_buf;
/* Record the current access type in env before a memory operation so
 * fault handlers can report it. */
272 static always_inline void gen_set_access_type(int access_type)
274 tcg_gen_movi_i32(cpu_access_type, access_type);
/* Write 'nip' into the architectural next-instruction pointer,
 * truncated to 32 bits when not in 64-bit (sf) mode. */
277 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
279 #if defined(TARGET_PPC64)
281 tcg_gen_movi_tl(cpu_nip, nip);
284 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
/* Raise exception 'excp' with error code 'error'.  nip is synchronized
 * first (unless an exception is already pending) so the handler sees
 * the faulting instruction address; setting ctx->exception then makes
 * the main translation loop stop this block. */
287 #define GEN_EXCP(ctx, excp, error) \
289 TCGv_i32 t0 = tcg_const_i32(excp); \
290 TCGv_i32 t1 = tcg_const_i32(error); \
291 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
292 gen_update_nip(ctx, (ctx)->nip); \
294 gen_helper_raise_exception_err(t0, t1); \
295 tcg_temp_free_i32(t0); \
296 tcg_temp_free_i32(t1); \
297 ctx->exception = (excp); \
/* Shorthands for the common program-check / facility-unavailable cases. */
300 #define GEN_EXCP_INVAL(ctx) \
301 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
302 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
304 #define GEN_EXCP_PRIVOPC(ctx) \
305 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
306 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
308 #define GEN_EXCP_PRIVREG(ctx) \
309 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
310 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
312 #define GEN_EXCP_NO_FP(ctx) \
313 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
315 #define GEN_EXCP_NO_AP(ctx) \
316 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
318 #define GEN_EXCP_NO_VR(ctx) \
319 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
321 /* Stop translation */
322 static always_inline void GEN_STOP (DisasContext *ctx)
324 gen_update_nip(ctx, ctx->nip);
325 ctx->exception = POWERPC_EXCP_STOP;
328 /* No need to update nip here, as execution flow will change */
329 static always_inline void GEN_SYNC (DisasContext *ctx)
331 ctx->exception = POWERPC_EXCP_SYNC;
/* Declare handler function gen_<name> and register its opcode_t entry
 * in the opcodes section via GEN_OPCODE.  GEN_HANDLER2 additionally
 * takes a distinct table name 'onam' for mnemonics that are not valid C
 * identifiers (e.g. "addic."). */
334 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
335 static void gen_##name (DisasContext *ctx); \
336 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
337 static void gen_##name (DisasContext *ctx)
339 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
340 static void gen_##name (DisasContext *ctx); \
341 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
342 static void gen_##name (DisasContext *ctx)
/* One entry of the link-time opcode table; padded so the layout matches
 * on 32- and 64-bit hosts (see also OPC_ALIGN below). */
344 typedef struct opcode_t {
345 unsigned char opc1, opc2, opc3;
346 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
347 unsigned char pad[5];
349 unsigned char pad[1];
351 opc_handler_t handler;
355 /*****************************************************************************/
356 /*** Instruction decoding ***/
/* Generate an opcode-field extractor returning bits
 * [shift, shift+nb) of the 32-bit instruction word; the signed variant
 * additionally sign-extends the result from 16 bits. */
357 #define EXTRACT_HELPER(name, shift, nb) \
358 static always_inline uint32_t name (uint32_t opcode) \
360 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
363 #define EXTRACT_SHELPER(name, shift, nb) \
364 static always_inline int32_t name (uint32_t opcode) \
366 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
/* Primary and extended opcode fields. */
370 EXTRACT_HELPER(opc1, 26, 6);
372 EXTRACT_HELPER(opc2, 1, 5);
374 EXTRACT_HELPER(opc3, 6, 5);
375 /* Update Cr0 flags */
376 EXTRACT_HELPER(Rc, 0, 1);
/* Register operand fields. */
378 EXTRACT_HELPER(rD, 21, 5);
380 EXTRACT_HELPER(rS, 21, 5);
382 EXTRACT_HELPER(rA, 16, 5);
384 EXTRACT_HELPER(rB, 11, 5);
386 EXTRACT_HELPER(rC, 6, 5);
/* Condition-register field and bit operands. */
388 EXTRACT_HELPER(crfD, 23, 3);
389 EXTRACT_HELPER(crfS, 18, 3);
390 EXTRACT_HELPER(crbD, 21, 5);
391 EXTRACT_HELPER(crbA, 16, 5);
392 EXTRACT_HELPER(crbB, 11, 5);
/* SPR numbers are encoded with their two 5-bit halves swapped; SPR()
 * undoes the swap. */
394 EXTRACT_HELPER(_SPR, 11, 10);
395 static always_inline uint32_t SPR (uint32_t opcode)
397 uint32_t sprn = _SPR(opcode);
399 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
401 /*** Get constants ***/
402 EXTRACT_HELPER(IMM, 12, 8);
403 /* 16 bits signed immediate value */
404 EXTRACT_SHELPER(SIMM, 0, 16);
405 /* 16 bits unsigned immediate value */
406 EXTRACT_HELPER(UIMM, 0, 16);
408 EXTRACT_HELPER(NB, 11, 5);
410 EXTRACT_HELPER(SH, 11, 5);
412 EXTRACT_HELPER(MB, 6, 5);
414 EXTRACT_HELPER(ME, 1, 5);
416 EXTRACT_HELPER(TO, 21, 5);
418 EXTRACT_HELPER(CRM, 12, 8);
419 EXTRACT_HELPER(FM, 17, 8);
420 EXTRACT_HELPER(SR, 16, 4);
421 EXTRACT_HELPER(FPIMM, 12, 4);
423 /*** Jump target decoding ***/
425 EXTRACT_SHELPER(d, 0, 16);
426 /* Immediate address */
/* LI/BD: branch displacement fields; the low two bits are always zero. */
427 static always_inline target_ulong LI (uint32_t opcode)
429 return (opcode >> 0) & 0x03FFFFFC;
432 static always_inline uint32_t BD (uint32_t opcode)
434 return (opcode >> 0) & 0xFFFC;
437 EXTRACT_HELPER(BO, 21, 5);
438 EXTRACT_HELPER(BI, 16, 5);
439 /* Absolute/relative address */
440 EXTRACT_HELPER(AA, 1, 1);
442 EXTRACT_HELPER(LK, 0, 1);
444 /* Create a mask between <start> and <end> bits */
/* Bit numbering is IBM style (bit 0 = most significant); a start > end
 * pair produces the complementary (wrapped) mask via the final XOR. */
445 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
449 #if defined(TARGET_PPC64)
450 if (likely(start == 0)) {
451 ret = UINT64_MAX << (63 - end);
452 } else if (likely(end == 63)) {
453 ret = UINT64_MAX >> start;
456 if (likely(start == 0)) {
457 ret = UINT32_MAX << (31 - end);
458 } else if (likely(end == 31)) {
459 ret = UINT32_MAX >> start;
463 ret = (((target_ulong)(-1ULL)) >> (start)) ^
464 (((target_ulong)(-1ULL) >> (end)) >> 1);
465 if (unlikely(start > end))
472 /*****************************************************************************/
473 /* PowerPC Instructions types definitions */
/* Bit flags describing optional instruction subsets; each opcode_t's
 * type field is matched against the flags of the emulated CPU model. */
475 PPC_NONE = 0x0000000000000000ULL,
476 /* PowerPC base instructions set */
477 PPC_INSNS_BASE = 0x0000000000000001ULL,
478 /* integer operations instructions */
479 #define PPC_INTEGER PPC_INSNS_BASE
480 /* flow control instructions */
481 #define PPC_FLOW PPC_INSNS_BASE
482 /* virtual memory instructions */
483 #define PPC_MEM PPC_INSNS_BASE
484 /* ld/st with reservation instructions */
485 #define PPC_RES PPC_INSNS_BASE
486 /* spr/msr access instructions */
487 #define PPC_MISC PPC_INSNS_BASE
488 /* Deprecated instruction sets */
489 /* Original POWER instruction set */
490 PPC_POWER = 0x0000000000000002ULL,
491 /* POWER2 instruction set extension */
492 PPC_POWER2 = 0x0000000000000004ULL,
493 /* Power RTC support */
494 PPC_POWER_RTC = 0x0000000000000008ULL,
495 /* Power-to-PowerPC bridge (601) */
496 PPC_POWER_BR = 0x0000000000000010ULL,
497 /* 64 bits PowerPC instruction set */
498 PPC_64B = 0x0000000000000020ULL,
499 /* New 64 bits extensions (PowerPC 2.0x) */
500 PPC_64BX = 0x0000000000000040ULL,
501 /* 64 bits hypervisor extensions */
502 PPC_64H = 0x0000000000000080ULL,
503 /* New wait instruction (PowerPC 2.0x) */
504 PPC_WAIT = 0x0000000000000100ULL,
505 /* Time base mftb instruction */
506 PPC_MFTB = 0x0000000000000200ULL,
508 /* Fixed-point unit extensions */
509 /* PowerPC 602 specific */
510 PPC_602_SPEC = 0x0000000000000400ULL,
511 /* isel instruction */
512 PPC_ISEL = 0x0000000000000800ULL,
513 /* popcntb instruction */
514 PPC_POPCNTB = 0x0000000000001000ULL,
515 /* string load / store */
516 PPC_STRING = 0x0000000000002000ULL,
518 /* Floating-point unit extensions */
519 /* Optional floating point instructions */
520 PPC_FLOAT = 0x0000000000010000ULL,
521 /* New floating-point extensions (PowerPC 2.0x) */
522 PPC_FLOAT_EXT = 0x0000000000020000ULL,
523 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
524 PPC_FLOAT_FRES = 0x0000000000080000ULL,
525 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
526 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
527 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
528 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
530 /* Vector/SIMD extensions */
531 /* Altivec support */
532 PPC_ALTIVEC = 0x0000000001000000ULL,
533 /* PowerPC 2.03 SPE extension */
534 PPC_SPE = 0x0000000002000000ULL,
535 /* PowerPC 2.03 SPE floating-point extension */
536 PPC_SPEFPU = 0x0000000004000000ULL,
538 /* Optional memory control instructions */
539 PPC_MEM_TLBIA = 0x0000000010000000ULL,
540 PPC_MEM_TLBIE = 0x0000000020000000ULL,
541 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
542 /* sync instruction */
543 PPC_MEM_SYNC = 0x0000000080000000ULL,
544 /* eieio instruction */
545 PPC_MEM_EIEIO = 0x0000000100000000ULL,
547 /* Cache control instructions */
548 PPC_CACHE = 0x0000000200000000ULL,
549 /* icbi instruction */
550 PPC_CACHE_ICBI = 0x0000000400000000ULL,
551 /* dcbz instruction with fixed cache line size */
552 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
553 /* dcbz instruction with tunable cache line size */
554 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
555 /* dcba instruction */
556 PPC_CACHE_DCBA = 0x0000002000000000ULL,
557 /* Freescale cache locking instructions */
558 PPC_CACHE_LOCK = 0x0000004000000000ULL,
560 /* MMU related extensions */
561 /* external control instructions */
562 PPC_EXTERN = 0x0000010000000000ULL,
563 /* segment register access instructions */
564 PPC_SEGMENT = 0x0000020000000000ULL,
565 /* PowerPC 6xx TLB management instructions */
566 PPC_6xx_TLB = 0x0000040000000000ULL,
567 /* PowerPC 74xx TLB management instructions */
568 PPC_74xx_TLB = 0x0000080000000000ULL,
569 /* PowerPC 40x TLB management instructions */
570 PPC_40x_TLB = 0x0000100000000000ULL,
571 /* segment register access instructions for PowerPC 64 "bridge" */
572 PPC_SEGMENT_64B = 0x0000200000000000ULL,
/* SLB management instructions (presumably slbia/slbie — confirm). */
574 PPC_SLBI = 0x0000400000000000ULL,
576 /* Embedded PowerPC dedicated instructions */
577 PPC_WRTEE = 0x0001000000000000ULL,
578 /* PowerPC 40x exception model */
579 PPC_40x_EXCP = 0x0002000000000000ULL,
580 /* PowerPC 405 Mac instructions */
581 PPC_405_MAC = 0x0004000000000000ULL,
582 /* PowerPC 440 specific instructions */
583 PPC_440_SPEC = 0x0008000000000000ULL,
584 /* BookE (embedded) PowerPC specification */
585 PPC_BOOKE = 0x0010000000000000ULL,
586 /* mfapidi instruction */
587 PPC_MFAPIDI = 0x0020000000000000ULL,
588 /* tlbiva instruction */
589 PPC_TLBIVA = 0x0040000000000000ULL,
590 /* tlbivax instruction */
591 PPC_TLBIVAX = 0x0080000000000000ULL,
592 /* PowerPC 4xx dedicated instructions */
593 PPC_4xx_COMMON = 0x0100000000000000ULL,
594 /* PowerPC 40x icbt instructions */
595 PPC_40x_ICBT = 0x0200000000000000ULL,
596 /* rfmci is not implemented in all BookE PowerPC */
597 PPC_RFMCI = 0x0400000000000000ULL,
598 /* rfdi instruction */
599 PPC_RFDI = 0x0800000000000000ULL,
/* Device control register (DCR) access */
601 PPC_DCR = 0x1000000000000000ULL,
602 /* DCR extended accesses */
603 PPC_DCRX = 0x2000000000000000ULL,
604 /* user-mode DCR access, implemented in PowerPC 460 */
605 PPC_DCRUX = 0x4000000000000000ULL,
608 /*****************************************************************************/
609 /* PowerPC instructions table */
/* opcode_t entries are aligned (OPC_ALIGN) and placed by the linker in a
 * dedicated section so the table can be walked between the start/end
 * marker entries at initialization time. */
610 #if HOST_LONG_BITS == 64
615 #if defined(__APPLE__)
616 #define OPCODES_SECTION \
617 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
619 #define OPCODES_SECTION \
620 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
/* GEN_OPCODE/GEN_OPCODE2 emit the opcode_t for a handler; the
 * DO_PPC_STATISTICS build carries the opcode name for profiling. */
623 #if defined(DO_PPC_STATISTICS)
624 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
625 OPCODES_SECTION opcode_t opc_##name = { \
633 .handler = &gen_##name, \
634 .oname = stringify(name), \
636 .oname = stringify(name), \
638 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
639 OPCODES_SECTION opcode_t opc_##name = { \
647 .handler = &gen_##name, \
653 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
654 OPCODES_SECTION opcode_t opc_##name = { \
662 .handler = &gen_##name, \
664 .oname = stringify(name), \
666 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
667 OPCODES_SECTION opcode_t opc_##name = { \
675 .handler = &gen_##name, \
/* Sentinel entry used to delimit the opcode table in its section. */
681 #define GEN_OPCODE_MARK(name) \
682 OPCODES_SECTION opcode_t opc_##name = { \
688 .inval = 0x00000000, \
692 .oname = stringify(name), \
695 /* SPR load/store helpers */
/* SPRs are not TCG globals; access them with direct loads/stores into
 * the env-resident spr[] array. */
696 static always_inline void gen_load_spr(TCGv t, int reg)
698 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
701 static always_inline void gen_store_spr(int reg, TCGv t)
703 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
706 /* Start opcode list */
707 GEN_OPCODE_MARK(start);
709 /* Invalid instruction */
/* Catch-all entry (inval mask 0xFFFFFFFF matches nothing valid) plus
 * the shared handler descriptor installed for undecoded opcodes. */
710 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
715 static opc_handler_t invalid_handler = {
718 .handler = gen_invalid,
721 /*** Integer comparison ***/
/* Set CR field 'crf' from comparing arg0 with arg1; 's' selects signed
 * vs. unsigned conditions.  The SO bit is first copied from XER, then
 * exactly one of LT/GT/EQ is OR'ed in via the three-label branch
 * sequence below. */
723 static always_inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
727 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
728 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO)
729 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
731 l1 = gen_new_label();
732 l2 = gen_new_label();
733 l3 = gen_new_label();
735 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
736 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
738 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
739 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
741 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
744 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
747 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
/* Immediate-operand variant: materialize arg1 and reuse gen_op_cmp. */
751 static always_inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
753 TCGv t0 = tcg_const_local_tl(arg1);
754 gen_op_cmp(arg0, t0, s, crf);
/* 32-bit comparison on a 64-bit target: sign- or zero-extend both
 * operands into temporaries, then delegate to gen_op_cmp. */
758 #if defined(TARGET_PPC64)
759 static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
762 t0 = tcg_temp_local_new();
763 t1 = tcg_temp_local_new();
765 tcg_gen_ext32s_tl(t0, arg0);
766 tcg_gen_ext32s_tl(t1, arg1);
768 tcg_gen_ext32u_tl(t0, arg0);
769 tcg_gen_ext32u_tl(t1, arg1);
771 gen_op_cmp(t0, t1, s, crf);
776 static always_inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
778 TCGv t0 = tcg_const_local_tl(arg1);
779 gen_op_cmp32(arg0, t0, s, crf);
/* Record CR0 for Rc=1 ("dot") instruction forms: signed compare of
 * 'reg' against zero, using the 32-bit variant outside sf mode. */
784 static always_inline void gen_set_Rc0 (DisasContext *ctx, TCGv reg)
786 #if defined(TARGET_PPC64)
788 gen_op_cmpi32(reg, 0, 1, 0);
791 gen_op_cmpi(reg, 0, 1, 0);
/* cmp family: on 64-bit targets the L bit (opcode & 0x00200000) selects
 * a full 64-bit compare in sf mode; otherwise 32-bit semantics apply. */
795 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER)
797 #if defined(TARGET_PPC64)
798 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
799 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
800 1, crfD(ctx->opcode));
803 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
804 1, crfD(ctx->opcode));
/* cmpi: signed compare against SIMM. */
808 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
810 #if defined(TARGET_PPC64)
811 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
812 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
813 1, crfD(ctx->opcode));
816 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
817 1, crfD(ctx->opcode));
/* cmpl: unsigned register compare. */
821 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER)
823 #if defined(TARGET_PPC64)
824 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
825 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
826 0, crfD(ctx->opcode));
829 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
830 0, crfD(ctx->opcode));
/* cmpli: unsigned compare against UIMM. */
834 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
836 #if defined(TARGET_PPC64)
837 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
838 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
839 0, crfD(ctx->opcode));
842 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
843 0, crfD(ctx->opcode));
846 /* isel (PowerPC 2.03 specification) */
/* rD = (CR bit 'bi' set) ? rA : rB, with rA == 0 meaning the constant 0. */
847 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
850 uint32_t bi = rC(ctx->opcode);
854 l1 = gen_new_label();
855 l2 = gen_new_label();
/* Select the requested bit within the 4-bit CR field bi>>2. */
857 mask = 1 << (3 - (bi & 0x03));
858 t0 = tcg_temp_new_i32();
859 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
860 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
861 if (rA(ctx->opcode) == 0)
862 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
864 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
867 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
869 tcg_temp_free_i32(t0);
872 /*** Integer arithmetic ***/
/* Update XER[OV,SO] for an add ('sub' flips the conditions) that
 * produced arg0 from arg1 and arg2.  Signed overflow is detected from
 * the sign relations of (arg0^arg1) and (arg1^arg2) via the two
 * XOR/branch pairs; outside sf mode the tests use only the low 32 bits
 * (ext32s before each branch). */
874 static always_inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, TCGv arg1, TCGv arg2, int sub)
879 l1 = gen_new_label();
880 /* Start with XER OV disabled, the most likely case */
881 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
882 t0 = tcg_temp_local_new();
883 tcg_gen_xor_tl(t0, arg0, arg1);
884 #if defined(TARGET_PPC64)
886 tcg_gen_ext32s_tl(t0, t0);
889 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
891 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
892 tcg_gen_xor_tl(t0, arg1, arg2);
893 #if defined(TARGET_PPC64)
895 tcg_gen_ext32s_tl(t0, t0);
898 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
900 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
901 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
/* Update XER[CA]: arg1 is the computed result, arg2 the operand it is
 * compared against.  The branch to l1 skips setting CA when the result
 * is unsigned-above the operand (GTU for add, GEU for sub).  On 64-bit
 * targets only the low 32 bits are compared outside sf mode. */
906 static always_inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1, TCGv arg2, int sub)
908 int l1 = gen_new_label();
910 #if defined(TARGET_PPC64)
911 if (!(ctx->sf_mode)) {
916 tcg_gen_ext32u_tl(t0, arg1);
917 tcg_gen_ext32u_tl(t1, arg2);
919 tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
921 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
923 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
931 tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
933 tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
935 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
940 /* Common add function */
/* Shared implementation behind add/addc/adde/addme/addze (and their
 * o/record forms):
 *   add_ca:      also add XER[CA] (carry-in) to the sum;
 *   compute_ca:  update XER[CA] from the result;
 *   compute_ov:  update XER[OV]/[SO] on signed overflow.
 * When flags are needed and ret aliases an input, the sum is built in a
 * scratch temp so the original operands survive for the flag checks. */
941 static always_inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
942 int add_ca, int compute_ca, int compute_ov)
946 if ((!compute_ca && !compute_ov) ||
947 (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
950 t0 = tcg_temp_local_new();
/* Extract carry-in from XER into t1 (value 0 or 1). */
954 t1 = tcg_temp_local_new();
955 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
956 tcg_gen_shri_tl(t1, t1, XER_CA);
959 if (compute_ca && compute_ov) {
960 /* Start with XER CA and OV disabled, the most likely case */
961 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
962 } else if (compute_ca) {
963 /* Start with XER CA disabled, the most likely case */
964 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
965 } else if (compute_ov) {
966 /* Start with XER OV disabled, the most likely case */
967 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
970 tcg_gen_add_tl(t0, arg1, arg2);
973 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
/* The carry-in addition can produce a second carry: check both
 * partial sums. */
976 tcg_gen_add_tl(t0, t0, t1);
977 gen_op_arith_compute_ca(ctx, t0, t1, 0);
981 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
984 if (unlikely(Rc(ctx->opcode) != 0))
985 gen_set_Rc0(ctx, t0);
987 if (!TCGV_EQUAL(t0, ret)) {
988 tcg_gen_mov_tl(ret, t0);
992 /* Add functions with two operands */
/* Expand one rD,rA,rB add variant; opc3 distinguishes the carry/
 * overflow flavors, the flag arguments select the gen_op_arith_add
 * behavior. */
993 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
994 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER) \
996 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
997 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
998 add_ca, compute_ca, compute_ov); \
1000 /* Add functions with one operand and one immediate */
1001 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
1002 add_ca, compute_ca, compute_ov) \
1003 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER) \
1005 TCGv t0 = tcg_const_local_tl(const_val); \
1006 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1007 cpu_gpr[rA(ctx->opcode)], t0, \
1008 add_ca, compute_ca, compute_ov); \
1009 tcg_temp_free(t0); \
1012 /* add add. addo addo. */
1013 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
1014 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
1015 /* addc addc. addco addco. */
1016 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
1017 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
1018 /* adde adde. addeo addeo. */
1019 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
1020 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
1021 /* addme addme. addmeo addmeo. */
1022 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
1023 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
1024 /* addze addze. addzeo addzeo. */
1025 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
1026 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
/* addi: rA == 0 encodes the "li" form — load the immediate, no add. */
1028 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1030 target_long simm = SIMM(ctx->opcode);
1032 if (rA(ctx->opcode) == 0) {
1034 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1036 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
/* addic / addic.: add immediate with XER[CA] update; compute_rc0
 * selects the CR0-recording form. */
1040 static always_inline void gen_op_addic (DisasContext *ctx, TCGv ret, TCGv arg1,
1043 target_long simm = SIMM(ctx->opcode);
1045 /* Start with XER CA disabled, the most likely case */
1046 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
/* A zero immediate cannot carry, so only the register move is needed. */
1048 if (likely(simm != 0)) {
1049 TCGv t0 = tcg_temp_local_new();
1050 tcg_gen_addi_tl(t0, arg1, simm);
1051 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
1052 tcg_gen_mov_tl(ret, t0);
1055 tcg_gen_mov_tl(ret, arg1);
1058 gen_set_Rc0(ctx, ret);
1061 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1063 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1065 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1067 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
/* addis: shifted immediate; rA == 0 encodes "lis". */
1070 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1072 target_long simm = SIMM(ctx->opcode);
1074 if (rA(ctx->opcode) == 0) {
1076 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1078 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
/* Core of divw[u][o][.]: 32-bit division with explicit checks for the
 * undefined cases — divide by zero, and INT32_MIN / -1 when signed.
 * In those cases the quotient is forced to the sign-extension of the
 * dividend (signed) or 0 (unsigned), and XER[OV,SO] is raised when
 * compute_ov is set; otherwise OV is cleared. */
1082 static always_inline void gen_op_arith_divw (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1083 int sign, int compute_ov)
1085 int l1 = gen_new_label();
1086 int l2 = gen_new_label();
1087 TCGv_i32 t0 = tcg_temp_local_new_i32();
1088 TCGv_i32 t1 = tcg_temp_local_new_i32();
1090 tcg_gen_trunc_tl_i32(t0, arg1);
1091 tcg_gen_trunc_tl_i32(t1, arg2);
1092 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
1094 int l3 = gen_new_label();
1095 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
1096 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
1098 tcg_gen_div_i32(t0, t0, t1);
1100 tcg_gen_divu_i32(t0, t0, t1);
1103 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
/* Exception path: sign-extension of the dividend / zero. */
1108 tcg_gen_sari_i32(t0, t0, 31);
1110 tcg_gen_movi_i32(t0, 0);
1113 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1116 tcg_gen_extu_i32_tl(ret, t0);
1117 tcg_temp_free_i32(t0);
1118 tcg_temp_free_i32(t1);
1119 if (unlikely(Rc(ctx->opcode) != 0))
1120 gen_set_Rc0(ctx, ret);
/* Expand one 32-bit divide variant. */
1123 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1124 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) \
1126 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1127 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1128 sign, compute_ov); \
1130 /* divwu divwu. divwuo divwuo. */
1131 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1132 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1133 /* divw divw. divwo divwo. */
1134 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1135 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1136 #if defined(TARGET_PPC64)
/* 64-bit counterpart of gen_op_arith_divw; same special-case handling
 * with INT64_MIN / -1, operating directly on the registers. */
1137 static always_inline void gen_op_arith_divd (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1138 int sign, int compute_ov)
1140 int l1 = gen_new_label();
1141 int l2 = gen_new_label();
1143 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
1145 int l3 = gen_new_label();
1146 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
1147 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
1149 tcg_gen_div_i64(ret, arg1, arg2);
1151 tcg_gen_divu_i64(ret, arg1, arg2);
1154 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1159 tcg_gen_sari_i64(ret, arg1, 63);
1161 tcg_gen_movi_i64(ret, 0);
1164 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1167 if (unlikely(Rc(ctx->opcode) != 0))
1168 gen_set_Rc0(ctx, ret);
1170 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1171 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \
1173 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1174 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1175 sign, compute_ov); \
1177 /* divdu divdu. divduo divduo. */
1178 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1179 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1180 /* divd divd. divdo divdo. */
1181 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1182 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1186 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
1190 t0 = tcg_temp_new_i64();
1191 t1 = tcg_temp_new_i64();
1192 #if defined(TARGET_PPC64)
1193 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1194 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1195 tcg_gen_mul_i64(t0, t0, t1);
1196 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1198 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1199 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1200 tcg_gen_mul_i64(t0, t0, t1);
1201 tcg_gen_shri_i64(t0, t0, 32);
1202 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1204 tcg_temp_free_i64(t0);
1205 tcg_temp_free_i64(t1);
1206 if (unlikely(Rc(ctx->opcode) != 0))
1207 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1209 /* mulhwu mulhwu. */
1210 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
1214 t0 = tcg_temp_new_i64();
1215 t1 = tcg_temp_new_i64();
1216 #if defined(TARGET_PPC64)
1217 tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1218 tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1219 tcg_gen_mul_i64(t0, t0, t1);
1220 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1222 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1223 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1224 tcg_gen_mul_i64(t0, t0, t1);
1225 tcg_gen_shri_i64(t0, t0, 32);
1226 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1228 tcg_temp_free_i64(t0);
1229 tcg_temp_free_i64(t1);
1230 if (unlikely(Rc(ctx->opcode) != 0))
1231 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1234 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER)
1236 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1237 cpu_gpr[rB(ctx->opcode)]);
1238 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1239 if (unlikely(Rc(ctx->opcode) != 0))
1240 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1242 /* mullwo mullwo. */
/* mullwo: like mullw but also sets XER[OV]/XER[SO] when the signed
 * 32x32 product does not fit in 32 bits (detected by comparing the full
 * 64-bit product with its 32-bit sign extension).
 * NOTE(review): temp/label declarations and some #else/#endif lines of
 * this handler are not visible in this chunk. */
1243 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
1248 t0 = tcg_temp_new_i64();
1249 t1 = tcg_temp_new_i64();
1250 l1 = gen_new_label();
1251 /* Start with XER OV disabled, the most likely case */
1252 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1253 #if defined(TARGET_PPC64)
1254 tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1255 tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1257 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1258 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1260 tcg_gen_mul_i64(t0, t0, t1);
1261 #if defined(TARGET_PPC64)
/* Overflow check: product equals its own 32-bit sign extension
 * iff it fits in 32 bits; branch to l1 (no overflow) in that case. */
1262 tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1263 tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1265 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1266 tcg_gen_ext32s_i64(t1, t0);
1267 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
/* Overflow path: set both OV and the sticky SO bit. */
1269 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1271 tcg_temp_free_i64(t0);
1272 tcg_temp_free_i64(t1);
1273 if (unlikely(Rc(ctx->opcode) != 0))
1274 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* mulli: rD = rA * SIMM (low bits of the product).
 * NOTE(review): the immediate argument line of the call below is not
 * visible in this chunk. */
1277 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1279 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1282 #if defined(TARGET_PPC64)
/* 64-bit multiply-high helpers: emit a call to gen_helper_<name> and
 * optionally update CR0 from the result. */
1283 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1284 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \
1286 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1287 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1288 if (unlikely(Rc(ctx->opcode) != 0)) \
1289 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1292 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1293 /* mulhd mulhd. */
1294 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
/* mulld mulld.: rD = low 64 bits of rA * rB. */
1296 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B)
1298 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1299 cpu_gpr[rB(ctx->opcode)]);
1300 if (unlikely(Rc(ctx->opcode) != 0))
1301 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1303 /* mulldo mulldo. */
1304 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1307 /* neg neg. nego nego. */
/* gen_op_arith_neg: ret = -arg1.  With ov_check, the most negative
 * value (INT64_MIN / INT32_MIN) cannot be negated: branch to l1, leave
 * the value unchanged and set XER[OV]|[SO]; otherwise clear XER[OV].
 * NOTE(review): the ov_check guard and label placements are partially
 * missing from this chunk. */
1308 static always_inline void gen_op_arith_neg (DisasContext *ctx, TCGv ret, TCGv arg1, int ov_check)
1310 int l1 = gen_new_label();
1311 int l2 = gen_new_label();
1312 TCGv t0 = tcg_temp_local_new();
1313 #if defined(TARGET_PPC64)
1315 tcg_gen_mov_tl(t0, arg1);
1316 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1320 tcg_gen_ext32s_tl(t0, arg1);
1321 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1323 tcg_gen_neg_tl(ret, arg1);
1325 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1329 tcg_gen_mov_tl(ret, t0);
1331 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1335 if (unlikely(Rc(ctx->opcode) != 0))
1336 gen_set_Rc0(ctx, ret);
/* neg: no overflow check. */
1338 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER)
1340 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
/* nego: same, but with XER[OV] update. */
1342 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER)
1344 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1347 /* Common subf function */
/* gen_op_arith_subf: ret = ~arg1 + arg2 (+ CA when add_ca), i.e. the
 * PowerPC subtract-from arg2 - arg1.  Optionally computes XER[CA]
 * (compute_ca) and XER[OV] (compute_ov).  A scratch t0 is used when ret
 * aliases an input and a carry/overflow computation still needs the
 * original operand values. */
1348 static always_inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1349 int add_ca, int compute_ca, int compute_ov)
/* Only write directly into ret when no later use of the inputs can be
 * clobbered by doing so. */
1353 if ((!compute_ca && !compute_ov) ||
1354 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1357 t0 = tcg_temp_local_new();
/* add_ca: extract the incoming carry from XER into t1 (0 or 1). */
1361 t1 = tcg_temp_local_new();
1362 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1363 tcg_gen_shri_tl(t1, t1, XER_CA);
1366 if (compute_ca && compute_ov) {
1367 /* Start with XER CA and OV disabled, the most likely case */
1368 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1369 } else if (compute_ca) {
1370 /* Start with XER CA disabled, the most likely case */
1371 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1372 } else if (compute_ov) {
1373 /* Start with XER OV disabled, the most likely case */
1374 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
/* Carry path: compute ~arg1 + arg2 (+ carry-in), updating CA after
 * each addition step. */
1378 tcg_gen_not_tl(t0, arg1);
1379 tcg_gen_add_tl(t0, t0, arg2);
1380 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1381 tcg_gen_add_tl(t0, t0, t1);
1382 gen_op_arith_compute_ca(ctx, t0, t1, 0);
/* No-carry-in path: plain subtraction. */
1385 tcg_gen_sub_tl(t0, arg2, arg1);
1387 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1391 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1394 if (unlikely(Rc(ctx->opcode) != 0))
1395 gen_set_Rc0(ctx, t0);
1397 if (!TCGV_EQUAL(t0, ret)) {
1398 tcg_gen_mov_tl(ret, t0);
1402 /* Sub functions with Two operands functions */
/* Expand one register-register subf variant: rD = rB - rA via
 * gen_op_arith_subf with the given CA/OV flags. */
1403 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1404 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER) \
1406 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1407 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1408 add_ca, compute_ca, compute_ov); \
1410 /* Sub functions with one operand and one immediate */
/* Expand a subf variant whose second operand is a constant
 * (const_val), used by subfme/subfze. */
1411 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1412 add_ca, compute_ca, compute_ov) \
1413 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER) \
1415 TCGv t0 = tcg_const_local_tl(const_val); \
1416 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1417 cpu_gpr[rA(ctx->opcode)], t0, \
1418 add_ca, compute_ca, compute_ov); \
1419 tcg_temp_free(t0); \
1421 /* subf subf. subfo subfo. */
1422 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1423 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1424 /* subfc subfc. subfco subfco. */
1425 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1426 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1427 /* subfe subfe. subfeo subfeo. */
1428 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1429 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1430 /* subfme subfme. subfmeo subfmeo. */
1431 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1432 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1433 /* subfze subfze. subfzeo subfzeo.*/
1434 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1435 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
/* subfic: rD = SIMM - rA, computing XER[CA] (no overflow bit). */
1437 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1439 /* Start with XER CA disabled, the most likely case */
1440 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1441 TCGv t0 = tcg_temp_local_new();
1442 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1443 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1444 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1446 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1450 /*** Integer logical ***/
/* Expand a two-source logical op: rA = tcg_op(rS, rB), Rc -> CR0. */
1451 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1452 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) \
1454 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1455 cpu_gpr[rB(ctx->opcode)]); \
1456 if (unlikely(Rc(ctx->opcode) != 0)) \
1457 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
/* Expand a one-source logical op: rA = tcg_op(rS), Rc -> CR0. */
1460 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1461 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1463 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1464 if (unlikely(Rc(ctx->opcode) != 0)) \
1465 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1469 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1471 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
/* andi.: always records into CR0 (the dot form is the only form). */
1473 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1475 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1476 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* andis.: same as andi. with the immediate shifted left 16 bits. */
1479 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1481 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1482 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* cntlzw: count leading zeros of the low 32 bits of rS, via helper. */
1485 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
1487 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1488 if (unlikely(Rc(ctx->opcode) != 0))
1489 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1492 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1493 /* extsb & extsb. */
1494 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1495 /* extsh & extsh. */
1496 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1498 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1500 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
/* or: rA = rS | rB, with a fast path for the register-move encoding
 * (rS == rB).  On 64-bit targets, "or rx,rx,rx" forms are overloaded as
 * process-priority hints written to SPR_PPR.
 * NOTE(review): several lines of this handler (declarations, #else and
 * the prio-case dispatch) are not visible in this chunk. */
1502 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1506 rs = rS(ctx->opcode);
1507 ra = rA(ctx->opcode);
1508 rb = rB(ctx->opcode);
1509 /* Optimisation for mr. ri case */
1510 if (rs != ra || rs != rb) {
1512 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1514 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1515 if (unlikely(Rc(ctx->opcode) != 0))
1516 gen_set_Rc0(ctx, cpu_gpr[ra]);
1517 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1518 gen_set_Rc0(ctx, cpu_gpr[rs]);
1519 #if defined(TARGET_PPC64)
1525 /* Set process priority to low */
1529 /* Set process priority to medium-low */
1533 /* Set process priority to normal */
1536 #if !defined(CONFIG_USER_ONLY)
/* The remaining priority levels are privileged: gated on the
 * supervisor level recorded in the DisasContext. */
1538 if (ctx->supervisor > 0) {
1539 /* Set process priority to very low */
1544 if (ctx->supervisor > 0) {
1545 /* Set process priority to medium-high */
1550 if (ctx->supervisor > 0) {
1551 /* Set process priority to high */
1556 if (ctx->supervisor > 1) {
1557 /* Set process priority to very high */
/* Store the selected priority in PPR bits 52:50. */
1567 TCGv t0 = tcg_temp_new();
1568 gen_load_spr(t0, SPR_PPR);
1569 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1570 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1571 gen_store_spr(SPR_PPR, t0);
1578 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
/* xor: rA = rS ^ rB, with a fast path emitting a constant zero when
 * rS == rB (x ^ x == 0). */
1580 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1582 /* Optimisation for "set to zero" case */
1583 if (rS(ctx->opcode) != rB(ctx->opcode))
1584 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1586 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1587 if (unlikely(Rc(ctx->opcode) != 0))
1588 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* ori: rA = rS | UIMM; "ori r0,r0,0" style no-ops are skipped. */
1591 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1593 target_ulong uimm = UIMM(ctx->opcode);
1595 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1597 /* XXX: should handle special NOPs for POWER series */
1600 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
/* oris: rA = rS | (UIMM << 16); same no-op skip. */
1603 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1605 target_ulong uimm = UIMM(ctx->opcode);
1607 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1611 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
/* xori: rA = rS ^ UIMM; same no-op skip. */
1614 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1616 target_ulong uimm = UIMM(ctx->opcode);
1618 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1622 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
/* xoris: rA = rS ^ (UIMM << 16); same no-op skip. */
1625 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1627 target_ulong uimm = UIMM(ctx->opcode);
1629 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1633 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1635 /* popcntb : PowerPC 2.03 specification */
/* popcntb: per-byte population count of rS into rA, via helper; the
 * 64-bit target uses a dedicated 64-bit helper. */
1636 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1638 #if defined(TARGET_PPC64)
1640 gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1643 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1646 #if defined(TARGET_PPC64)
1647 /* extsw & extsw. */
1648 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
/* cntlzd: 64-bit count leading zeros of rS, via helper. */
1650 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
1652 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1653 if (unlikely(Rc(ctx->opcode) != 0))
1654 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1658 /*** Integer rotate ***/
1659 /* rlwimi & rlwimi. */
/* rlwimi: rotate rS left by sh and insert the bits selected by
 * MASK(mb, me) into rA, keeping rA's other bits.  The sh==0/mb==0/me==31
 * case degenerates to a 32-bit zero extension of rS.
 * NOTE(review): several lines (declarations of t1/mask, #else/#endif)
 * are not visible in this chunk. */
1660 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1662 uint32_t mb, me, sh;
1664 mb = MB(ctx->opcode);
1665 me = ME(ctx->opcode);
1666 sh = SH(ctx->opcode);
1667 if (likely(sh == 0 && mb == 0 && me == 31)) {
1668 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1672 TCGv t0 = tcg_temp_new();
1673 #if defined(TARGET_PPC64)
/* 64-bit target: rotate in 32-bit space via an i32 temp, then widen. */
1674 TCGv_i32 t2 = tcg_temp_new_i32();
1675 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1676 tcg_gen_rotli_i32(t2, t2, sh);
1677 tcg_gen_extu_i32_i64(t0, t2);
1678 tcg_temp_free_i32(t2);
1680 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1682 #if defined(TARGET_PPC64)
/* Merge: rA = (rotated & mask) | (rA & ~mask). */
1686 mask = MASK(mb, me);
1687 t1 = tcg_temp_new();
1688 tcg_gen_andi_tl(t0, t0, mask);
1689 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1690 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1694 if (unlikely(Rc(ctx->opcode) != 0))
1695 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1697 /* rlwinm & rlwinm. */
/* rlwinm: rA = rotl32(rS, sh) & MASK(mb, me), with fast paths for the
 * common shift-left (mb==0, me==31-sh) and shift-right
 * (me==31, sh==32-mb) encodings. */
1698 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1700 uint32_t mb, me, sh;
1702 sh = SH(ctx->opcode);
1703 mb = MB(ctx->opcode);
1704 me = ME(ctx->opcode);
/* Fast path: pure 32-bit left shift (or plain zero-extend if sh==0). */
1706 if (likely(mb == 0 && me == (31 - sh))) {
1707 if (likely(sh == 0)) {
1708 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1710 TCGv t0 = tcg_temp_new();
1711 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1712 tcg_gen_shli_tl(t0, t0, sh);
1713 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
/* Fast path: pure 32-bit right shift by mb. */
1716 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1717 TCGv t0 = tcg_temp_new();
1718 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1719 tcg_gen_shri_tl(t0, t0, mb);
1720 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
/* General case: 32-bit rotate then mask. */
1723 TCGv t0 = tcg_temp_new();
1724 #if defined(TARGET_PPC64)
1725 TCGv_i32 t1 = tcg_temp_new_i32();
1726 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1727 tcg_gen_rotli_i32(t1, t1, sh);
1728 tcg_gen_extu_i32_i64(t0, t1);
1729 tcg_temp_free_i32(t1);
1731 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1733 #if defined(TARGET_PPC64)
1737 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1740 if (unlikely(Rc(ctx->opcode) != 0))
1741 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1743 /* rlwnm & rlwnm. */
/* rlwnm: rA = rotl32(rS, rB & 0x1f) & MASK(mb, me); the mask step is
 * skipped when it would be all-ones (mb==0, me==31).
 * NOTE(review): declarations and some #else/#endif lines are not
 * visible in this chunk. */
1744 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1748 #if defined(TARGET_PPC64)
1752 mb = MB(ctx->opcode);
1753 me = ME(ctx->opcode);
1754 t0 = tcg_temp_new();
1755 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1756 #if defined(TARGET_PPC64)
/* 64-bit target: perform the variable rotate in 32-bit space. */
1757 t1 = tcg_temp_new_i32();
1758 t2 = tcg_temp_new_i32();
1759 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1760 tcg_gen_trunc_i64_i32(t2, t0);
1761 tcg_gen_rotl_i32(t1, t1, t2);
1762 tcg_gen_extu_i32_i64(t0, t1);
1763 tcg_temp_free_i32(t1);
1764 tcg_temp_free_i32(t2);
1766 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1768 if (unlikely(mb != 0 || me != 31)) {
1769 #if defined(TARGET_PPC64)
1773 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1775 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1778 if (unlikely(Rc(ctx->opcode) != 0))
1779 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1782 #if defined(TARGET_PPC64)
/* Expand the two handler entries of an MDS-form rotate (one extra
 * opcode bit selects the mbn/men argument passed to gen_<name>). */
1783 #define GEN_PPC64_R2(name, opc1, opc2) \
1784 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1786 gen_##name(ctx, 0); \
1788 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1791 gen_##name(ctx, 1); \
/* Expand the four handler entries of an MD-form rotate (two extra
 * opcode bits select the mbn/shn arguments passed to gen_<name>). */
1793 #define GEN_PPC64_R4(name, opc1, opc2) \
1794 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1796 gen_##name(ctx, 0, 0); \
1798 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1801 gen_##name(ctx, 0, 1); \
1803 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1806 gen_##name(ctx, 1, 0); \
1808 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1811 gen_##name(ctx, 1, 1); \
/* gen_rldinm: common body for rldicl/rldicr/rldic — rotate rS left by
 * sh and AND with MASK(mb, me), with shift-only fast paths. */
1814 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1815 uint32_t me, uint32_t sh)
/* Fast path: pure left shift. */
1817 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1818 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
/* Fast path: pure right shift by mb. */
1819 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1820 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1822 TCGv t0 = tcg_temp_new();
1823 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1824 if (likely(mb == 0 && me == 63)) {
1825 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1827 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1831 if (unlikely(Rc(ctx->opcode) != 0))
1832 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1834 /* rldicl - rldicl. */
/* MD-form: shn/mbn are the split high bits of the 6-bit sh/mb fields. */
1835 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1839 sh = SH(ctx->opcode) | (shn << 5);
1840 mb = MB(ctx->opcode) | (mbn << 5);
1841 gen_rldinm(ctx, mb, 63, sh);
1843 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1844 /* rldicr - rldicr. */
/* Note: MD-form encodes me in the MB field position, hence MB() here. */
1845 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1849 sh = SH(ctx->opcode) | (shn << 5);
1850 me = MB(ctx->opcode) | (men << 5);
1851 gen_rldinm(ctx, 0, me, sh);
1853 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1854 /* rldic - rldic. */
1855 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1859 sh = SH(ctx->opcode) | (shn << 5);
1860 mb = MB(ctx->opcode) | (mbn << 5);
1861 gen_rldinm(ctx, mb, 63 - sh, sh);
1863 GEN_PPC64_R4(rldic, 0x1E, 0x04);
/* gen_rldnm: common body for rldcl/rldcr — rotate rS left by rB & 0x3f
 * and AND with MASK(mb, me) unless the mask is all-ones.
 * NOTE(review): the mb/me parameter values passed in are reassigned
 * from the opcode below; the declarations line is not visible here. */
1865 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1870 mb = MB(ctx->opcode);
1871 me = ME(ctx->opcode);
1872 t0 = tcg_temp_new();
1873 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1874 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1875 if (unlikely(mb != 0 || me != 63)) {
1876 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1878 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1881 if (unlikely(Rc(ctx->opcode) != 0))
1882 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1885 /* rldcl - rldcl. */
1886 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1890 mb = MB(ctx->opcode) | (mbn << 5);
1891 gen_rldnm(ctx, mb, 63);
1893 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1894 /* rldcr - rldcr. */
/* MDS-form encodes me in the MB field position, hence MB() here. */
1895 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1899 me = MB(ctx->opcode) | (men << 5);
1900 gen_rldnm(ctx, 0, me);
1902 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1903 /* rldimi - rldimi. */
/* rldimi: rotate rS left by sh and insert the bits selected by
 * MASK(mb, me) into rA, keeping rA's other bits.
 * NOTE(review): the assignment of 'me' (expected me = 63 - sh) and the
 * t0/t1/mask declarations are not visible in this chunk — confirm
 * against the full source. */
1904 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1906 uint32_t sh, mb, me;
1908 sh = SH(ctx->opcode) | (shn << 5);
1909 mb = MB(ctx->opcode) | (mbn << 5);
/* sh==0 && mb==0 selects the whole register: plain move. */
1911 if (unlikely(sh == 0 && mb == 0)) {
1912 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1917 t0 = tcg_temp_new();
1918 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1919 t1 = tcg_temp_new();
1920 mask = MASK(mb, me);
/* Merge: rA = (rotated & mask) | (rA & ~mask). */
1921 tcg_gen_andi_tl(t0, t0, mask);
1922 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1923 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1927 if (unlikely(Rc(ctx->opcode) != 0))
1928 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1930 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1933 /*** Integer shift ***/
/* slw: rA = (rS << (rB & 0x3f)) zero-extended to 32 bits; a shift
 * amount >= 32 (bit 5 of rB set) yields 0, per the ISA. */
1935 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER)
1939 l1 = gen_new_label();
1940 l2 = gen_new_label();
1942 t0 = tcg_temp_local_new();
1943 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1944 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
1945 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1948 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
1949 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1952 if (unlikely(Rc(ctx->opcode) != 0))
1953 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sraw: 32-bit arithmetic shift right by rB, via helper (the helper
 * also handles XER[CA]). */
1956 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
1958 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1959 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1960 if (unlikely(Rc(ctx->opcode) != 0))
1961 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1963 /* srawi & srawi. */
/* srawi: 32-bit arithmetic shift right by the immediate sh.  XER[CA]
 * is set iff the source is negative AND nonzero bits are shifted out;
 * sh==0 takes the plain-move path with CA cleared. */
1964 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1966 int sh = SH(ctx->opcode);
1970 l1 = gen_new_label();
1971 l2 = gen_new_label();
1972 t0 = tcg_temp_local_new();
/* CA computation: skip to l1 (clear CA) when the value is
 * non-negative or no set bits fall below the shift amount. */
1973 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1974 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1975 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1976 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1977 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1980 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1982 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1983 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
/* sh == 0: result is rS unchanged, CA cleared. */
1986 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1987 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1989 if (unlikely(Rc(ctx->opcode) != 0))
1990 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srw: rA = (uint32)rS >> (rB & 0x3f); a shift amount >= 32 yields 0,
 * per the ISA. */
1993 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER)
1997 l1 = gen_new_label();
1998 l2 = gen_new_label();
2000 t0 = tcg_temp_local_new();
2001 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
2002 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
2003 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
/* Zero-extend first so the shift is a logical 32-bit shift. */
2006 t1 = tcg_temp_new();
2007 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
2008 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
2012 if (unlikely(Rc(ctx->opcode) != 0))
2013 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2015 #if defined(TARGET_PPC64)
/* sld: rA = rS << (rB & 0x7f); a shift amount >= 64 yields 0. */
2017 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B)
2021 l1 = gen_new_label();
2022 l2 = gen_new_label();
2024 t0 = tcg_temp_local_new();
2025 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2026 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2027 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2030 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2033 if (unlikely(Rc(ctx->opcode) != 0))
2034 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srad: 64-bit arithmetic shift right by rB, via helper (the helper
 * also handles XER[CA]). */
2037 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
2039 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
2040 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2041 if (unlikely(Rc(ctx->opcode) != 0))
2042 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2044 /* sradi & sradi. */
/* gen_sradi: 64-bit arithmetic shift right by an immediate; n supplies
 * the high bit of the split 6-bit shift amount.  XER[CA] is set iff the
 * source is negative AND nonzero bits are shifted out; sh==0 takes the
 * plain-move path with CA cleared. */
2045 static always_inline void gen_sradi (DisasContext *ctx, int n)
2047 int sh = SH(ctx->opcode) + (n << 5);
2051 l1 = gen_new_label();
2052 l2 = gen_new_label();
2053 t0 = tcg_temp_local_new();
2054 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
2055 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
2056 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2057 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
2060 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
2063 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
2065 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2066 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
2068 if (unlikely(Rc(ctx->opcode) != 0))
2069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* Two entries: the extra opcode bit is the top bit of the shift. */
2071 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
2075 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
/* srd: rA = rS >> (rB & 0x7f) logical; a shift amount >= 64 yields 0. */
2080 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B)
2084 l1 = gen_new_label();
2085 l2 = gen_new_label();
2087 t0 = tcg_temp_local_new();
2088 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2089 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2090 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2093 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2096 if (unlikely(Rc(ctx->opcode) != 0))
2097 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2101 /*** Floating-Point arithmetic ***/
/* Handler-expansion macros for FP arithmetic.  All check fpu_enabled
 * (raising the no-FP exception otherwise), reset the FP status, call
 * the helper, optionally round to single precision (isfloat), and
 * update FPRF/CR1 via gen_compute_fprf. */
/* A*C+B three-operand form (fmadd family, fsel). */
2102 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
2103 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
2105 if (unlikely(!ctx->fpu_enabled)) { \
2106 GEN_EXCP_NO_FP(ctx); \
2109 gen_reset_fpstatus(); \
2110 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2111 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2113 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2115 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
2116 Rc(ctx->opcode) != 0); \
/* Instantiate the double (0x3F) and single (0x3B) variants. */
2119 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2120 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2121 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
/* A,B two-operand form (fadd, fdiv, fsub). */
2123 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2124 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
2126 if (unlikely(!ctx->fpu_enabled)) { \
2127 GEN_EXCP_NO_FP(ctx); \
2130 gen_reset_fpstatus(); \
2131 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2132 cpu_fpr[rB(ctx->opcode)]); \
2134 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2136 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2137 set_fprf, Rc(ctx->opcode) != 0); \
2139 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2140 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2141 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* A,C two-operand form (fmul uses the C field, not B). */
2143 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2144 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
2146 if (unlikely(!ctx->fpu_enabled)) { \
2147 GEN_EXCP_NO_FP(ctx); \
2150 gen_reset_fpstatus(); \
2151 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2152 cpu_fpr[rC(ctx->opcode)]); \
2154 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2156 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2157 set_fprf, Rc(ctx->opcode) != 0); \
2159 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2160 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2161 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* B-only unary form (frsp, fctiw, fabs, fneg, ...). */
2163 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2164 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
2166 if (unlikely(!ctx->fpu_enabled)) { \
2167 GEN_EXCP_NO_FP(ctx); \
2170 gen_reset_fpstatus(); \
2171 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2172 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2173 set_fprf, Rc(ctx->opcode) != 0); \
/* B-only unary form with caller-supplied opc1 (fsqrt/fre family). */
2176 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2177 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
2179 if (unlikely(!ctx->fpu_enabled)) { \
2180 GEN_EXCP_NO_FP(ctx); \
2183 gen_reset_fpstatus(); \
2184 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2185 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2186 set_fprf, Rc(ctx->opcode) != 0); \
2190 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2192 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2194 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2197 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2200 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2203 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
/* frsqrtes: single-precision reciprocal square-root estimate — reuses
 * the double-precision frsqrte helper and rounds the result to single
 * with frsp. */
2206 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES)
2208 if (unlikely(!ctx->fpu_enabled)) {
2209 GEN_EXCP_NO_FP(ctx);
2212 gen_reset_fpstatus();
2213 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2214 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2215 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
/* fsel: no single variant, no FPRF update. */
2219 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2221 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* fsqrt: double-precision square root via helper, FPRF updated. */
2224 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
2226 if (unlikely(!ctx->fpu_enabled)) {
2227 GEN_EXCP_NO_FP(ctx);
2230 gen_reset_fpstatus();
2231 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2232 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
/* fsqrts: same computation rounded to single precision with frsp. */
2235 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
2237 if (unlikely(!ctx->fpu_enabled)) {
2238 GEN_EXCP_NO_FP(ctx);
2241 gen_reset_fpstatus();
2242 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2243 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2244 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2247 /*** Floating-Point multiply-and-add ***/
2248 /* fmadd - fmadds */
2249 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2250 /* fmsub - fmsubs */
2251 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2252 /* fnmadd - fnmadds */
2253 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2254 /* fnmsub - fnmsubs */
2255 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2257 /*** Floating-Point round & convert ***/
/* fctiw/fctiwz: convert to 32-bit int (current / truncate rounding). */
2259 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2261 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* frsp: round to single precision. */
2263 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2264 #if defined(TARGET_PPC64)
/* 64-bit conversions: fcfid, fctid, fctidz. */
2266 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2268 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2270 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
/* frin/friz/frip/frim: round to integer (nearest/toward-zero/+inf/-inf). */
2274 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2276 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2278 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2280 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2282 /*** Floating-Point compare ***/
/* fcmpo: ordered compare of fA and fB into crfD, then check for a
 * deferred FP exception. */
2284 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
2286 if (unlikely(!ctx->fpu_enabled)) {
2287 GEN_EXCP_NO_FP(ctx);
2290 gen_reset_fpstatus();
2291 gen_helper_fcmpo(cpu_crf[crfD(ctx->opcode)],
2292 cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2293 gen_helper_float_check_status();
/* fcmpu: unordered compare — same shape, different helper. */
2297 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
2299 if (unlikely(!ctx->fpu_enabled)) {
2300 GEN_EXCP_NO_FP(ctx);
2303 gen_reset_fpstatus();
2304 gen_helper_fcmpu(cpu_crf[crfD(ctx->opcode)],
2305 cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2306 gen_helper_float_check_status();
2309 /*** Floating-point move ***/
2311 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
2312 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2315 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
/* fmr: raw 64-bit register copy, no FP status side effects. */
2316 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
2318 if (unlikely(!ctx->fpu_enabled)) {
2319 GEN_EXCP_NO_FP(ctx);
2322 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2323 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2327 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
2328 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2330 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
2331 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2333 /*** Floating-Point status & ctrl register ***/
/* mcrfs: copy FPSCR field crfS into CR field crfD, then clear the
 * copied (exception) bits in FPSCR. */
2335 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
2339 if (unlikely(!ctx->fpu_enabled)) {
2340 GEN_EXCP_NO_FP(ctx);
2343 gen_optimize_fprf();
2344 bfa = 4 * (7 - crfS(ctx->opcode));
2345 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2346 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2347 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
/* mffs: copy the full FPSCR (zero-extended to 64 bits) into fD. */
2351 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2353 if (unlikely(!ctx->fpu_enabled)) {
2354 GEN_EXCP_NO_FP(ctx);
2357 gen_optimize_fprf();
2358 gen_reset_fpstatus();
2359 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2360 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
/* mtfsb0: clear one FPSCR bit (FEX/VX, which are derived bits, are
 * left alone); with Rc, copy FPSCR[OX..] into CR1. */
2364 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2368 if (unlikely(!ctx->fpu_enabled)) {
2369 GEN_EXCP_NO_FP(ctx);
/* Convert the big-endian bit number in crbD to a shift count. */
2372 crb = 32 - (crbD(ctx->opcode) >> 2);
2373 gen_optimize_fprf();
2374 gen_reset_fpstatus();
2375 if (likely(crb != 30 && crb != 29))
2376 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(1 << crb));
2377 if (unlikely(Rc(ctx->opcode) != 0)) {
2378 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
/* mtfsb1: set one FPSCR bit through the helper so enabling an
 * exception bit can raise the corresponding FP exception. */
2383 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2387 if (unlikely(!ctx->fpu_enabled)) {
2388 GEN_EXCP_NO_FP(ctx);
2391 crb = 32 - (crbD(ctx->opcode) >> 2);
2392 gen_optimize_fprf();
2393 gen_reset_fpstatus();
2394 /* XXX: we pretend we can only do IEEE floating-point computations */
2395 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2396 TCGv_i32 t0 = tcg_const_i32(crb);
2397 gen_helper_fpscr_setbit(t0);
2398 tcg_temp_free_i32(t0);
2400 if (unlikely(Rc(ctx->opcode) != 0)) {
2401 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2403 /* We can raise a deferred exception */
2404 gen_helper_float_check_status();
/* mtfsf: store fB into the FPSCR fields selected by the FM mask, via
 * helper, then check for a deferred FP exception. */
2408 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2412 if (unlikely(!ctx->fpu_enabled)) {
2413 GEN_EXCP_NO_FP(ctx);
2416 gen_optimize_fprf();
2417 gen_reset_fpstatus();
2418 t0 = tcg_const_i32(FM(ctx->opcode));
2419 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2420 tcg_temp_free_i32(t0);
2421 if (unlikely(Rc(ctx->opcode) != 0)) {
2422 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2424 /* We can raise a deferred exception */
2425 gen_helper_float_check_status();
/* mtfsfi: store the 4-bit immediate into one FPSCR field (selected by
 * sh, derived from the crbD field). */
2429 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2435 if (unlikely(!ctx->fpu_enabled)) {
2436 GEN_EXCP_NO_FP(ctx);
2439 bf = crbD(ctx->opcode) >> 2;
2441 gen_optimize_fprf();
2442 gen_reset_fpstatus();
2443 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2444 t1 = tcg_const_i32(1 << sh);
2445 gen_helper_store_fpscr(t0, t1);
2446 tcg_temp_free_i64(t0);
2447 tcg_temp_free_i32(t1);
2448 if (unlikely(Rc(ctx->opcode) != 0)) {
2449 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2451 /* We can raise a deferred exception */
2452 gen_helper_float_check_status();
2455 /*** Addressing modes ***/
2456 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
/* Special cases avoid dead TCG ops: rA==0 -> load immediate, simm==0 -> plain move. */
2457 static always_inline void gen_addr_imm_index (TCGv EA,
2461 target_long simm = SIMM(ctx->opcode);
2464 if (rA(ctx->opcode) == 0)
2465 tcg_gen_movi_tl(EA, simm);
2466 else if (likely(simm != 0))
2467 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
/* (else branch, line elided in excerpt) simm == 0: EA = rA */
2469 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
/* Register indirect with register index: EA = (rA|0) + rB */
2472 static always_inline void gen_addr_reg_index (TCGv EA,
2475 if (rA(ctx->opcode) == 0)
2476 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2478 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
/* Register-only addressing: EA = (rA|0) — used by the string instructions. */
2481 static always_inline void gen_addr_register (TCGv EA,
2484 if (rA(ctx->opcode) == 0)
2485 tcg_gen_movi_tl(EA, 0);
2487 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
/* Emit an alignment check: if (EA & mask) != 0, raise POWERPC_EXCP_ALIGN.
 * NOTE(review): t1/t2 declarations and label/temp cleanup lines are elided
 * in this excerpt. */
2490 static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask)
2492 int l1 = gen_new_label();
2493 TCGv t0 = tcg_temp_new();
2495 /* NIP cannot be restored if the memory exception comes from a helper */
2496 gen_update_nip(ctx, ctx->nip - 4);
2497 tcg_gen_andi_tl(t0, EA, mask);
2498 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2499 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2500 t2 = tcg_const_i32(0);
2501 gen_helper_raise_exception_err(t1, t2);
2502 tcg_temp_free_i32(t1);
2503 tcg_temp_free_i32(t2);
2508 /*** Integer load ***/
2509 #if defined(TARGET_PPC64)
/* 64-bit raw access helpers. flags bit1 set => full 64-bit addressing;
 * clear => 32-bit mode, so the address is zero-extended first.
 * flags >> 2 is the softmmu memory index. */
2510 #define GEN_QEMU_LD_PPC64(width) \
2511 static always_inline void gen_qemu_ld##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2513 if (likely(flags & 2)) \
2514 tcg_gen_qemu_ld##width(t0, t1, flags >> 2); \
2516 TCGv addr = tcg_temp_new(); \
2517 tcg_gen_ext32u_tl(addr, t1); \
2518 tcg_gen_qemu_ld##width(t0, addr, flags >> 2); \
2519 tcg_temp_free(addr); \
2522 GEN_QEMU_LD_PPC64(8u)
2523 GEN_QEMU_LD_PPC64(8s)
2524 GEN_QEMU_LD_PPC64(16u)
2525 GEN_QEMU_LD_PPC64(16s)
2526 GEN_QEMU_LD_PPC64(32u)
2527 GEN_QEMU_LD_PPC64(32s)
2528 GEN_QEMU_LD_PPC64(64)
2530 #define GEN_QEMU_ST_PPC64(width) \
2531 static always_inline void gen_qemu_st##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2533 if (likely(flags & 2)) \
2534 tcg_gen_qemu_st##width(t0, t1, flags >> 2); \
2536 TCGv addr = tcg_temp_new(); \
2537 tcg_gen_ext32u_tl(addr, t1); \
2538 tcg_gen_qemu_st##width(t0, addr, flags >> 2); \
2539 tcg_temp_free(addr); \
2542 GEN_QEMU_ST_PPC64(8)
2543 GEN_QEMU_ST_PPC64(16)
2544 GEN_QEMU_ST_PPC64(32)
2545 GEN_QEMU_ST_PPC64(64)
/* PPC64 front-end access wrappers. flags bit0 set => little-endian mode,
 * so loaded/stored values are byte-swapped around the raw _ppc64 access.
 * NOTE(review): the excerpt elides some declarations, else-branches and
 * closing braces between the visible statements. */
2547 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2549 gen_qemu_ld8u_ppc64(arg0, arg1, flags);
2552 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2554 gen_qemu_ld8s_ppc64(arg0, arg1, flags);
2557 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2559 if (unlikely(flags & 1)) {
2561 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2562 t0 = tcg_temp_new_i32();
2563 tcg_gen_trunc_tl_i32(t0, arg0);
2564 tcg_gen_bswap16_i32(t0, t0);
2565 tcg_gen_extu_i32_tl(arg0, t0);
2566 tcg_temp_free_i32(t0);
2568 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2571 static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
2573 if (unlikely(flags & 1)) {
/* LE path: load unsigned, byte-swap, then sign-extend the 16-bit result */
2575 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2576 t0 = tcg_temp_new_i32();
2577 tcg_gen_trunc_tl_i32(t0, arg0);
2578 tcg_gen_bswap16_i32(t0, t0);
2579 tcg_gen_extu_i32_tl(arg0, t0);
2580 tcg_gen_ext16s_tl(arg0, arg0);
2581 tcg_temp_free_i32(t0);
2583 gen_qemu_ld16s_ppc64(arg0, arg1, flags);
2586 static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
2588 if (unlikely(flags & 1)) {
2590 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2591 t0 = tcg_temp_new_i32();
2592 tcg_gen_trunc_tl_i32(t0, arg0);
2593 tcg_gen_bswap_i32(t0, t0);
2594 tcg_gen_extu_i32_tl(arg0, t0);
2595 tcg_temp_free_i32(t0);
2597 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2600 static always_inline void gen_qemu_ld32s(TCGv arg0, TCGv arg1, int flags)
2602 if (unlikely(flags & 1)) {
2604 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2605 t0 = tcg_temp_new_i32();
2606 tcg_gen_trunc_tl_i32(t0, arg0);
2607 tcg_gen_bswap_i32(t0, t0);
/* sign-extending move back to the target-long destination */
2608 tcg_gen_ext_i32_tl(arg0, t0);
2609 tcg_temp_free_i32(t0);
2611 gen_qemu_ld32s_ppc64(arg0, arg1, flags);
2614 static always_inline void gen_qemu_ld64(TCGv arg0, TCGv arg1, int flags)
2616 gen_qemu_ld64_ppc64(arg0, arg1, flags);
2617 if (unlikely(flags & 1))
2618 tcg_gen_bswap_i64(arg0, arg0);
2621 static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2623 gen_qemu_st8_ppc64(arg0, arg1, flags);
2626 static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
2628 if (unlikely(flags & 1)) {
2631 t0 = tcg_temp_new_i32();
2632 tcg_gen_trunc_tl_i32(t0, arg0);
2633 tcg_gen_ext16u_i32(t0, t0);
2634 tcg_gen_bswap16_i32(t0, t0);
2635 t1 = tcg_temp_new_i64();
2636 tcg_gen_extu_i32_tl(t1, t0);
2637 tcg_temp_free_i32(t0);
2638 gen_qemu_st16_ppc64(t1, arg1, flags);
2639 tcg_temp_free_i64(t1);
2641 gen_qemu_st16_ppc64(arg0, arg1, flags);
2644 static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
2646 if (unlikely(flags & 1)) {
2649 t0 = tcg_temp_new_i32();
2650 tcg_gen_trunc_tl_i32(t0, arg0);
2651 tcg_gen_bswap_i32(t0, t0);
2652 t1 = tcg_temp_new_i64();
2653 tcg_gen_extu_i32_tl(t1, t0);
2654 tcg_temp_free_i32(t0);
2655 gen_qemu_st32_ppc64(t1, arg1, flags);
2656 tcg_temp_free_i64(t1);
2658 gen_qemu_st32_ppc64(arg0, arg1, flags);
2661 static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
2663 if (unlikely(flags & 1)) {
2664 TCGv_i64 t0 = tcg_temp_new_i64();
2665 tcg_gen_bswap_i64(t0, arg0);
2666 gen_qemu_st64_ppc64(t0, arg1, flags);
2667 tcg_temp_free_i64(t0);
2669 gen_qemu_st64_ppc64(arg0, arg1, flags);
2673 #else /* defined(TARGET_PPC64) */
/* PPC32 variant: no 32/64-bit address-mode distinction, so flags >> 1 is
 * the memory index and bit0 still selects little-endian byte swapping.
 * NOTE(review): excerpt elides some braces/else lines between statements. */
2674 #define GEN_QEMU_LD_PPC32(width) \
2675 static always_inline void gen_qemu_ld##width##_ppc32(TCGv arg0, TCGv arg1, int flags) \
2677 tcg_gen_qemu_ld##width(arg0, arg1, flags >> 1); \
2679 GEN_QEMU_LD_PPC32(8u)
2680 GEN_QEMU_LD_PPC32(8s)
2681 GEN_QEMU_LD_PPC32(16u)
2682 GEN_QEMU_LD_PPC32(16s)
2683 GEN_QEMU_LD_PPC32(32u)
2684 GEN_QEMU_LD_PPC32(32s)
2685 static always_inline void gen_qemu_ld64_ppc32(TCGv_i64 arg0, TCGv arg1, int flags)
2687 tcg_gen_qemu_ld64(arg0, arg1, flags >> 1);
2690 #define GEN_QEMU_ST_PPC32(width) \
2691 static always_inline void gen_qemu_st##width##_ppc32(TCGv arg0, TCGv arg1, int flags) \
2693 tcg_gen_qemu_st##width(arg0, arg1, flags >> 1); \
2695 GEN_QEMU_ST_PPC32(8)
2696 GEN_QEMU_ST_PPC32(16)
2697 GEN_QEMU_ST_PPC32(32)
2698 static always_inline void gen_qemu_st64_ppc32(TCGv_i64 arg0, TCGv arg1, int flags)
2700 tcg_gen_qemu_st64(arg0, arg1, flags >> 1);
/* NOTE(review): the two wrappers below shift flags a second time before
 * calling the _ppc32 helpers (which shift again); presumably harmless for
 * the 8-bit accesses since no byte swap is needed — verify against the
 * original file. */
2703 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2705 gen_qemu_ld8u_ppc32(arg0, arg1, flags >> 1);
2708 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2710 gen_qemu_ld8s_ppc32(arg0, arg1, flags >> 1);
2713 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2715 gen_qemu_ld16u_ppc32(arg0, arg1, flags >> 1);
2716 if (unlikely(flags & 1))
2717 tcg_gen_bswap16_i32(arg0, arg0);
2720 static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
2722 if (unlikely(flags & 1)) {
2723 gen_qemu_ld16u_ppc32(arg0, arg1, flags);
2724 tcg_gen_bswap16_i32(arg0, arg0);
2725 tcg_gen_ext16s_i32(arg0, arg0);
2727 gen_qemu_ld16s_ppc32(arg0, arg1, flags);
2730 static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
2732 gen_qemu_ld32u_ppc32(arg0, arg1, flags);
2733 if (unlikely(flags & 1))
2734 tcg_gen_bswap_i32(arg0, arg0);
2737 static always_inline void gen_qemu_ld64(TCGv_i64 arg0, TCGv arg1, int flags)
2739 gen_qemu_ld64_ppc32(arg0, arg1, flags);
2740 if (unlikely(flags & 1))
2741 tcg_gen_bswap_i64(arg0, arg0);
2744 static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2746 gen_qemu_st8_ppc32(arg0, arg1, flags);
2749 static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
2751 if (unlikely(flags & 1)) {
2752 TCGv_i32 temp = tcg_temp_new_i32();
2753 tcg_gen_ext16u_i32(temp, arg0);
2754 tcg_gen_bswap16_i32(temp, temp);
2755 gen_qemu_st16_ppc32(temp, arg1, flags);
2756 tcg_temp_free_i32(temp);
2758 gen_qemu_st16_ppc32(arg0, arg1, flags);
2761 static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
2763 if (unlikely(flags & 1)) {
2764 TCGv_i32 temp = tcg_temp_new_i32();
2765 tcg_gen_bswap_i32(temp, arg0);
2766 gen_qemu_st32_ppc32(temp, arg1, flags);
2767 tcg_temp_free_i32(temp);
2769 gen_qemu_st32_ppc32(arg0, arg1, flags);
2772 static always_inline void gen_qemu_st64(TCGv_i64 arg0, TCGv arg1, int flags)
2774 if (unlikely(flags & 1)) {
2775 TCGv_i64 temp = tcg_temp_new_i64();
2776 tcg_gen_bswap_i64(temp, arg0);
2777 gen_qemu_st64_ppc32(temp, arg1, flags);
2778 tcg_temp_free_i64(temp);
2780 gen_qemu_st64_ppc32(arg0, arg1, flags);
/* Integer-load handler generators:
 *   GEN_LD   - D-form load (immediate index)
 *   GEN_LDU  - D-form load with update; rA==0 or rA==rD is invalid
 *   GEN_LDUX - X-form load with update (register index)
 *   GEN_LDX  - X-form load (register index)
 *   GEN_LDS  - expands all four variants for one width. */
2784 #define GEN_LD(name, ldop, opc, type) \
2785 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
2787 TCGv EA = tcg_temp_new(); \
2788 gen_set_access_type(ACCESS_INT); \
2789 gen_addr_imm_index(EA, ctx, 0); \
2790 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2791 tcg_temp_free(EA); \
2794 #define GEN_LDU(name, ldop, opc, type) \
2795 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2798 if (unlikely(rA(ctx->opcode) == 0 || \
2799 rA(ctx->opcode) == rD(ctx->opcode))) { \
2800 GEN_EXCP_INVAL(ctx); \
2803 EA = tcg_temp_new(); \
2804 gen_set_access_type(ACCESS_INT); \
2805 if (type == PPC_64B) \
2806 gen_addr_imm_index(EA, ctx, 0x03); \
2808 gen_addr_imm_index(EA, ctx, 0); \
2809 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2810 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2811 tcg_temp_free(EA); \
2814 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2815 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2818 if (unlikely(rA(ctx->opcode) == 0 || \
2819 rA(ctx->opcode) == rD(ctx->opcode))) { \
2820 GEN_EXCP_INVAL(ctx); \
2823 EA = tcg_temp_new(); \
2824 gen_set_access_type(ACCESS_INT); \
2825 gen_addr_reg_index(EA, ctx); \
2826 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2827 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2828 tcg_temp_free(EA); \
2831 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2832 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
2834 TCGv EA = tcg_temp_new(); \
2835 gen_set_access_type(ACCESS_INT); \
2836 gen_addr_reg_index(EA, ctx); \
2837 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2838 tcg_temp_free(EA); \
2841 #define GEN_LDS(name, ldop, op, type) \
2842 GEN_LD(name, ldop, op | 0x20, type); \
2843 GEN_LDU(name, ldop, op | 0x21, type); \
2844 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2845 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2847 /* lbz lbzu lbzux lbzx */
2848 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2849 /* lha lhau lhaux lhax */
2850 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2851 /* lhz lhzu lhzux lhzx */
2852 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2853 /* lwz lwzu lwzux lwzx */
2854 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2855 #if defined(TARGET_PPC64)
/* lwaux / lwax (sign-extending 32-bit loads, 64-bit only) */
2857 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2859 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2861 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2863 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
/* DS-form handler: opcode 0x3A covers ld, ldu and lwa, discriminated by
 * the low opcode bits. NOTE(review): excerpt elides braces/returns here. */
2864 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2867 if (Rc(ctx->opcode)) {
2868 if (unlikely(rA(ctx->opcode) == 0 ||
2869 rA(ctx->opcode) == rD(ctx->opcode))) {
2870 GEN_EXCP_INVAL(ctx);
2874 EA = tcg_temp_new();
2875 gen_set_access_type(ACCESS_INT);
2876 gen_addr_imm_index(EA, ctx, 0x03);
2877 if (ctx->opcode & 0x02) {
2878 /* lwa (lwau is undefined) */
2879 gen_qemu_ld32s(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2882 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2884 if (Rc(ctx->opcode))
2885 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
/* lq: quadword load (two consecutive 64-bit loads into an even/odd GPR
 * pair). Supervisor-only here; rd must be even and != ra; little-endian
 * mode is not supported. NOTE(review): excerpt elides #else/#endif and
 * braces between visible lines. */
2889 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2891 #if defined(CONFIG_USER_ONLY)
2892 GEN_EXCP_PRIVOPC(ctx);
2897 /* Restore CPU state */
2898 if (unlikely(ctx->supervisor == 0)) {
2899 GEN_EXCP_PRIVOPC(ctx);
2902 ra = rA(ctx->opcode);
2903 rd = rD(ctx->opcode);
2904 if (unlikely((rd & 1) || rd == ra)) {
2905 GEN_EXCP_INVAL(ctx);
2908 if (unlikely(ctx->mem_idx & 1)) {
2909 /* Little-endian mode is not handled */
2910 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2913 EA = tcg_temp_new();
2914 gen_set_access_type(ACCESS_INT);
2915 gen_addr_imm_index(EA, ctx, 0x0F);
2916 gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
2917 tcg_gen_addi_tl(EA, EA, 8);
2918 gen_qemu_ld64(cpu_gpr[rd+1], EA, ctx->mem_idx);
2924 /*** Integer store ***/
/* Store generators mirror the load generators above; the update forms
 * only forbid rA==0 (rA==rS is legal for stores). */
2925 #define GEN_ST(name, stop, opc, type) \
2926 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
2928 TCGv EA = tcg_temp_new(); \
2929 gen_set_access_type(ACCESS_INT); \
2930 gen_addr_imm_index(EA, ctx, 0); \
2931 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2932 tcg_temp_free(EA); \
/* NOTE(review): GEN_STU names the handler stop##u (not name##u) — looks
 * inconsistent with GEN_LDU; confirm against the original file. */
2935 #define GEN_STU(name, stop, opc, type) \
2936 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2939 if (unlikely(rA(ctx->opcode) == 0)) { \
2940 GEN_EXCP_INVAL(ctx); \
2943 EA = tcg_temp_new(); \
2944 gen_set_access_type(ACCESS_INT); \
2945 if (type == PPC_64B) \
2946 gen_addr_imm_index(EA, ctx, 0x03); \
2948 gen_addr_imm_index(EA, ctx, 0); \
2949 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2950 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2951 tcg_temp_free(EA); \
2954 #define GEN_STUX(name, stop, opc2, opc3, type) \
2955 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2958 if (unlikely(rA(ctx->opcode) == 0)) { \
2959 GEN_EXCP_INVAL(ctx); \
2962 EA = tcg_temp_new(); \
2963 gen_set_access_type(ACCESS_INT); \
2964 gen_addr_reg_index(EA, ctx); \
2965 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2966 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2967 tcg_temp_free(EA); \
2970 #define GEN_STX(name, stop, opc2, opc3, type) \
2971 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
2973 TCGv EA = tcg_temp_new(); \
2974 gen_set_access_type(ACCESS_INT); \
2975 gen_addr_reg_index(EA, ctx); \
2976 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2977 tcg_temp_free(EA); \
2980 #define GEN_STS(name, stop, op, type) \
2981 GEN_ST(name, stop, op | 0x20, type); \
2982 GEN_STU(name, stop, op | 0x21, type); \
2983 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2984 GEN_STX(name, stop, 0x17, op | 0x00, type)
2986 /* stb stbu stbux stbx */
2987 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2988 /* sth sthu sthux sthx */
2989 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2990 /* stw stwu stwux stwx */
2991 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2992 #if defined(TARGET_PPC64)
2993 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2994 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
/* DS-form handler: opcode 0x3E covers std, stdu and stq; low bits 0x2
 * select stq (quadword store, supervisor-only, even rs, no LE mode).
 * NOTE(review): excerpt elides braces/returns/#endif between lines. */
2995 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
3000 rs = rS(ctx->opcode);
3001 if ((ctx->opcode & 0x3) == 0x2) {
3002 #if defined(CONFIG_USER_ONLY)
3003 GEN_EXCP_PRIVOPC(ctx);
3006 if (unlikely(ctx->supervisor == 0)) {
3007 GEN_EXCP_PRIVOPC(ctx);
3010 if (unlikely(rs & 1)) {
3011 GEN_EXCP_INVAL(ctx);
3014 if (unlikely(ctx->mem_idx & 1)) {
3015 /* Little-endian mode is not handled */
3016 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
3019 EA = tcg_temp_new();
3020 gen_set_access_type(ACCESS_INT);
3021 gen_addr_imm_index(EA, ctx, 0x03);
3022 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3023 tcg_gen_addi_tl(EA, EA, 8);
3024 gen_qemu_st64(cpu_gpr[rs+1], EA, ctx->mem_idx);
3029 if (Rc(ctx->opcode)) {
3030 if (unlikely(rA(ctx->opcode) == 0)) {
3031 GEN_EXCP_INVAL(ctx);
3035 EA = tcg_temp_new();
3036 gen_set_access_type(ACCESS_INT);
3037 gen_addr_imm_index(EA, ctx, 0x03);
3038 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3039 if (Rc(ctx->opcode))
3040 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3045 /*** Integer load and store with byte reverse ***/
/* These compose the normal access helper with an explicit byte swap.
 * NOTE(review): declared `void always_inline` without `static` — unusual
 * for this file; confirm against the original. Excerpt also elides the
 * closing braces and the free of t2 in the store helpers. */
3047 void always_inline gen_qemu_ld16ur(TCGv t0, TCGv t1, int flags)
3049 TCGv_i32 temp = tcg_temp_new_i32();
3050 gen_qemu_ld16u(t0, t1, flags);
3051 tcg_gen_trunc_tl_i32(temp, t0);
3052 tcg_gen_bswap16_i32(temp, temp);
3053 tcg_gen_extu_i32_tl(t0, temp);
3054 tcg_temp_free_i32(temp);
3056 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3059 void always_inline gen_qemu_ld32ur(TCGv t0, TCGv t1, int flags)
3061 TCGv_i32 temp = tcg_temp_new_i32();
3062 gen_qemu_ld32u(t0, t1, flags);
3063 tcg_gen_trunc_tl_i32(temp, t0);
3064 tcg_gen_bswap_i32(temp, temp);
3065 tcg_gen_extu_i32_tl(t0, temp);
3066 tcg_temp_free_i32(temp);
3068 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3071 void always_inline gen_qemu_st16r(TCGv t0, TCGv t1, int flags)
3073 TCGv_i32 temp = tcg_temp_new_i32();
3074 TCGv t2 = tcg_temp_new();
3075 tcg_gen_trunc_tl_i32(temp, t0);
3076 tcg_gen_ext16u_i32(temp, temp);
3077 tcg_gen_bswap16_i32(temp, temp);
3078 tcg_gen_extu_i32_tl(t2, temp);
3079 tcg_temp_free_i32(temp);
3080 gen_qemu_st16(t2, t1, flags);
3083 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3086 void always_inline gen_qemu_st32r(TCGv t0, TCGv t1, int flags)
3088 TCGv_i32 temp = tcg_temp_new_i32();
3089 TCGv t2 = tcg_temp_new();
3090 tcg_gen_trunc_tl_i32(temp, t0);
3091 tcg_gen_bswap_i32(temp, temp);
3092 tcg_gen_extu_i32_tl(t2, temp);
3093 tcg_temp_free_i32(temp);
3094 gen_qemu_st32(t2, t1, flags);
3097 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3099 /*** Integer load and store multiple ***/
/* lmw/stmw: multi-register transfer done entirely in a helper; NIP must
 * be updated first so a fault inside the helper reports the right PC. */
3101 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
3103 TCGv t0 = tcg_temp_new();
3104 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
3105 /* NIP cannot be restored if the memory exception comes from a helper */
3106 gen_update_nip(ctx, ctx->nip - 4);
3107 gen_addr_imm_index(t0, ctx, 0);
3108 gen_helper_lmw(t0, t1);
3110 tcg_temp_free_i32(t1);
3114 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
3116 TCGv t0 = tcg_temp_new();
3117 TCGv_i32 t1 = tcg_const_i32(rS(ctx->opcode));
3118 /* NIP cannot be restored if the memory exception comes from a helper */
3119 gen_update_nip(ctx, ctx->nip - 4);
3120 gen_addr_imm_index(t0, ctx, 0);
3121 gen_helper_stmw(t0, t1);
3123 tcg_temp_free_i32(t1);
3126 /*** Integer load and store strings ***/
3128 /* PowerPC32 specification says we must generate an exception if
3129 * rA is in the range of registers to be loaded.
3130 * On the other hand, IBM says this is valid, but rA won't be loaded.
3131 * For now, I'll follow the spec...
/* lswi: load nb bytes starting at GPR `start`, wrapping from r31 to r0.
 * The register-overlap check below rejects ranges that would load rA.
 * NOTE(review): nr's derivation from nb is elided in this excerpt. */
3133 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
3137 int nb = NB(ctx->opcode);
3138 int start = rD(ctx->opcode);
3139 int ra = rA(ctx->opcode);
3145 if (unlikely(((start + nr) > 32 &&
3146 start <= ra && (start + nr - 32) > ra) ||
3147 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
3148 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3149 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
3152 /* NIP cannot be restored if the memory exception comes from a helper */
3153 gen_update_nip(ctx, ctx->nip - 4);
3154 t0 = tcg_temp_new();
3155 gen_addr_register(t0, ctx);
3156 t1 = tcg_const_i32(nb);
3157 t2 = tcg_const_i32(start);
3158 gen_helper_lsw(t0, t1, t2);
3160 tcg_temp_free_i32(t1);
3161 tcg_temp_free_i32(t2);
/* lswx: like lswi but the byte count comes from XER at run time, so the
 * overlap check is done inside the helper. */
3165 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
3167 TCGv t0 = tcg_temp_new();
3168 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
3169 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
3170 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
3171 /* NIP cannot be restored if the memory exception comes from a helper */
3172 gen_update_nip(ctx, ctx->nip - 4);
3173 gen_addr_reg_index(t0, ctx);
3174 gen_helper_lswx(t0, t1, t2, t3);
3176 tcg_temp_free_i32(t1);
3177 tcg_temp_free_i32(t2);
3178 tcg_temp_free_i32(t3);
3182 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
3184 int nb = NB(ctx->opcode);
3185 TCGv t0 = tcg_temp_new();
3187 TCGv_i32 t2 = tcg_const_i32(rS(ctx->opcode));
3188 /* NIP cannot be restored if the memory exception comes from a helper */
3189 gen_update_nip(ctx, ctx->nip - 4);
3190 gen_addr_register(t0, ctx);
3193 t1 = tcg_const_i32(nb);
3194 gen_helper_stsw(t0, t1, t2);
3196 tcg_temp_free_i32(t1);
3197 tcg_temp_free_i32(t2);
/* stswx: byte count is XER[0:6] (masked to 0x7F below). */
3201 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
3203 TCGv t0 = tcg_temp_new();
3204 TCGv_i32 t1 = tcg_temp_new_i32();
3205 TCGv_i32 t2 = tcg_const_i32(rS(ctx->opcode));
3206 /* NIP cannot be restored if the memory exception comes from a helper */
3207 gen_update_nip(ctx, ctx->nip - 4);
3208 gen_addr_reg_index(t0, ctx);
3209 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3210 tcg_gen_andi_i32(t1, t1, 0x7F);
3211 gen_helper_stsw(t0, t1, t2);
3213 tcg_temp_free_i32(t1);
3214 tcg_temp_free_i32(t2);
3217 /*** Memory synchronisation ***/
/* eieio/isync/sync are no-ops for TCG (single-threaded execution model
 * here); only the handler shells are visible in this excerpt. */
3219 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
3224 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
/* lwarx: word load that also records the reservation address in
 * cpu_reserve for the matching stwcx. */
3230 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
3232 TCGv t0 = tcg_temp_local_new();
3233 gen_set_access_type(ACCESS_RES);
3234 gen_addr_reg_index(t0, ctx);
3235 gen_check_align(ctx, t0, 0x03);
3236 #if defined(TARGET_PPC64)
3238 tcg_gen_ext32u_tl(t0, t0);
3240 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
3241 tcg_gen_mov_tl(cpu_reserve, t0);
/* stwcx.: store succeeds only if the address still matches cpu_reserve;
 * CR0 = XER.SO plus EQ on success; reservation is cleared (-1) after. */
3246 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
3248 int l1 = gen_new_label();
3249 TCGv t0 = tcg_temp_local_new();
3250 gen_set_access_type(ACCESS_RES);
3251 gen_addr_reg_index(t0, ctx);
3252 gen_check_align(ctx, t0, 0x03);
3253 #if defined(TARGET_PPC64)
3255 tcg_gen_ext32u_tl(t0, t0);
3257 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3258 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3259 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3260 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3261 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3262 gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
3264 tcg_gen_movi_tl(cpu_reserve, -1);
3268 #if defined(TARGET_PPC64)
/* ldarx/stdcx.: 64-bit variants of the reservation pair above. */
3270 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
3272 TCGv t0 = tcg_temp_local_new();
3273 gen_set_access_type(ACCESS_RES);
3274 gen_addr_reg_index(t0, ctx);
3275 gen_check_align(ctx, t0, 0x07);
3277 tcg_gen_ext32u_tl(t0, t0);
3278 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
3279 tcg_gen_mov_tl(cpu_reserve, t0);
3284 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
3286 int l1 = gen_new_label();
3287 TCGv t0 = tcg_temp_local_new();
3288 gen_set_access_type(ACCESS_RES);
3289 gen_addr_reg_index(t0, ctx);
3290 gen_check_align(ctx, t0, 0x07);
3292 tcg_gen_ext32u_tl(t0, t0);
3293 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3294 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3295 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3296 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3297 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3298 gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
3300 tcg_gen_movi_tl(cpu_reserve, -1);
3303 #endif /* defined(TARGET_PPC64) */
3306 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
/* wait: mark CPU halted and end the TB via EXCP_HLT.
 * NOTE(review): t0 is stored to env->halted without an explicit movi
 * visible here — the initializing line appears elided in this excerpt. */
3311 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
3313 TCGv_i32 t0 = tcg_temp_new_i32();
3314 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3315 tcg_temp_free_i32(t0);
3316 /* Stop translation, as the CPU is supposed to sleep from now */
3317 GEN_EXCP(ctx, EXCP_HLT, 1);
3320 /*** Floating-point load ***/
/* FP load generators: same shapes as the integer ones but target
 * cpu_fpr[] and check ctx->fpu_enabled first. */
3321 #define GEN_LDF(name, ldop, opc, type) \
3322 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
3325 if (unlikely(!ctx->fpu_enabled)) { \
3326 GEN_EXCP_NO_FP(ctx); \
3329 gen_set_access_type(ACCESS_FLOAT); \
3330 EA = tcg_temp_new(); \
3331 gen_addr_imm_index(EA, ctx, 0); \
3332 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3333 tcg_temp_free(EA); \
3336 #define GEN_LDUF(name, ldop, opc, type) \
3337 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3340 if (unlikely(!ctx->fpu_enabled)) { \
3341 GEN_EXCP_NO_FP(ctx); \
3344 if (unlikely(rA(ctx->opcode) == 0)) { \
3345 GEN_EXCP_INVAL(ctx); \
3348 gen_set_access_type(ACCESS_FLOAT); \
3349 EA = tcg_temp_new(); \
3350 gen_addr_imm_index(EA, ctx, 0); \
3351 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3352 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3353 tcg_temp_free(EA); \
3356 #define GEN_LDUXF(name, ldop, opc, type) \
3357 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3360 if (unlikely(!ctx->fpu_enabled)) { \
3361 GEN_EXCP_NO_FP(ctx); \
3364 if (unlikely(rA(ctx->opcode) == 0)) { \
3365 GEN_EXCP_INVAL(ctx); \
3368 gen_set_access_type(ACCESS_FLOAT); \
3369 EA = tcg_temp_new(); \
3370 gen_addr_reg_index(EA, ctx); \
3371 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3372 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3373 tcg_temp_free(EA); \
3376 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3377 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
3380 if (unlikely(!ctx->fpu_enabled)) { \
3381 GEN_EXCP_NO_FP(ctx); \
3384 gen_set_access_type(ACCESS_FLOAT); \
3385 EA = tcg_temp_new(); \
3386 gen_addr_reg_index(EA, ctx); \
3387 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3388 tcg_temp_free(EA); \
3391 #define GEN_LDFS(name, ldop, op, type) \
3392 GEN_LDF(name, ldop, op | 0x20, type); \
3393 GEN_LDUF(name, ldop, op | 0x21, type); \
3394 GEN_LDUXF(name, ldop, op | 0x01, type); \
3395 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
/* lfs path: load a 32-bit word and widen it to float64 via helper. */
3397 static always_inline void gen_qemu_ld32fs(TCGv_i64 arg1, TCGv arg2, int flags)
3399 TCGv t0 = tcg_temp_new();
3400 TCGv_i32 t1 = tcg_temp_new_i32();
3401 gen_qemu_ld32u(t0, arg2, flags);
3402 tcg_gen_trunc_tl_i32(t1, t0);
3404 gen_helper_float32_to_float64(arg1, t1);
3405 tcg_temp_free_i32(t1);
3408 /* lfd lfdu lfdux lfdx */
3409 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3410 /* lfs lfsu lfsux lfsx */
3411 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3413 /*** Floating-point store ***/
/* FP store generators, mirroring the FP load generators above. */
3414 #define GEN_STF(name, stop, opc, type) \
3415 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
3418 if (unlikely(!ctx->fpu_enabled)) { \
3419 GEN_EXCP_NO_FP(ctx); \
3422 gen_set_access_type(ACCESS_FLOAT); \
3423 EA = tcg_temp_new(); \
3424 gen_addr_imm_index(EA, ctx, 0); \
3425 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3426 tcg_temp_free(EA); \
3429 #define GEN_STUF(name, stop, opc, type) \
3430 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3433 if (unlikely(!ctx->fpu_enabled)) { \
3434 GEN_EXCP_NO_FP(ctx); \
3437 if (unlikely(rA(ctx->opcode) == 0)) { \
3438 GEN_EXCP_INVAL(ctx); \
3441 gen_set_access_type(ACCESS_FLOAT); \
3442 EA = tcg_temp_new(); \
3443 gen_addr_imm_index(EA, ctx, 0); \
3444 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3445 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3446 tcg_temp_free(EA); \
3449 #define GEN_STUXF(name, stop, opc, type) \
3450 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3453 if (unlikely(!ctx->fpu_enabled)) { \
3454 GEN_EXCP_NO_FP(ctx); \
3457 if (unlikely(rA(ctx->opcode) == 0)) { \
3458 GEN_EXCP_INVAL(ctx); \
3461 gen_set_access_type(ACCESS_FLOAT); \
3462 EA = tcg_temp_new(); \
3463 gen_addr_reg_index(EA, ctx); \
3464 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3465 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3466 tcg_temp_free(EA); \
3469 #define GEN_STXF(name, stop, opc2, opc3, type) \
3470 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
3473 if (unlikely(!ctx->fpu_enabled)) { \
3474 GEN_EXCP_NO_FP(ctx); \
3477 gen_set_access_type(ACCESS_FLOAT); \
3478 EA = tcg_temp_new(); \
3479 gen_addr_reg_index(EA, ctx); \
3480 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3481 tcg_temp_free(EA); \
3484 #define GEN_STFS(name, stop, op, type) \
3485 GEN_STF(name, stop, op | 0x20, type); \
3486 GEN_STUF(name, stop, op | 0x21, type); \
3487 GEN_STUXF(name, stop, op | 0x01, type); \
3488 GEN_STXF(name, stop, 0x17, op | 0x00, type)
/* stfs path: narrow float64 to float32 via helper, store 32-bit word. */
3490 static always_inline void gen_qemu_st32fs(TCGv_i64 arg1, TCGv arg2, int flags)
3492 TCGv_i32 t0 = tcg_temp_new_i32();
3493 TCGv t1 = tcg_temp_new();
3494 gen_helper_float64_to_float32(t0, arg1);
3495 tcg_gen_extu_i32_tl(t1, t0);
3496 tcg_temp_free_i32(t0);
3497 gen_qemu_st32(t1, arg2, flags);
3501 /* stfd stfdu stfdux stfdx */
3502 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3503 /* stfs stfsu stfsux stfsx */
3504 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
/* stfiwx: store the low 32 bits of the FPR unconverted.
 * NOTE(review): line 3510 ends with a comma operator — looks like it
 * should be a semicolon; behavior is the same but confirm upstream. */
3507 static always_inline void gen_qemu_st32fiw(TCGv_i64 arg1, TCGv arg2, int flags)
3509 TCGv t0 = tcg_temp_new();
3510 tcg_gen_trunc_i64_tl(t0, arg1),
3511 gen_qemu_st32(t0, arg2, flags);
3515 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
/* Emit a branch to `dest`: direct TB chaining when the target stays in
 * the same guest page and single-stepping is off; otherwise set NIP and
 * honor the single-step / gdbstub-step flags.
 * NOTE(review): excerpt elides tb assignment, else branches and the
 * final exit_tb; low 2 bits of dest are cleared (instructions are
 * word-aligned). */
3518 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
3521 TranslationBlock *tb;
3523 #if defined(TARGET_PPC64)
3525 dest = (uint32_t) dest;
3527 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3528 likely(!ctx->singlestep_enabled)) {
3530 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3531 tcg_gen_exit_tb((long)tb + n);
3533 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3534 if (unlikely(ctx->singlestep_enabled)) {
3535 if ((ctx->singlestep_enabled &
3536 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3537 ctx->exception == POWERPC_EXCP_BRANCH) {
3538 target_ulong tmp = ctx->nip;
3540 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
3543 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3544 gen_update_nip(ctx, dest);
3545 gen_helper_raise_debug();
/* Set the link register to nip, truncated to 32 bits when not in
 * 64-bit (sf) mode. */
3552 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
3554 #if defined(TARGET_PPC64)
3555 if (ctx->sf_mode == 0)
3556 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3559 tcg_gen_movi_tl(cpu_lr, nip);
/* b/ba/bl/bla: unconditional branch. LI is sign-extended (26-bit on 32-bit
 * targets, masked differently on 64-bit); AA selects absolute vs relative;
 * LK saves the return address. NOTE(review): the AA!=0 branch assigning
 * `target = li` is elided in this excerpt. */
3563 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3565 target_ulong li, target;
3567 ctx->exception = POWERPC_EXCP_BRANCH;
3568 /* sign extend LI */
3569 #if defined(TARGET_PPC64)
3571 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3574 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3575 if (likely(AA(ctx->opcode) == 0))
3576 target = ctx->nip + li - 4;
3579 if (LK(ctx->opcode))
3580 gen_setlr(ctx, ctx->nip);
3581 gen_goto_tb(ctx, 0, target);
/* Shared conditional-branch body for bc (BCOND_IM), bclr (BCOND_LR) and
 * bcctr (BCOND_CTR). BO bit 0x4 clear => decrement-and-test CTR; BO bit
 * 0x10 clear => test a CR bit (BI). The taken path either chains to an
 * immediate target or masks `target` into NIP; the fall-through path at
 * l1 resumes at ctx->nip. NOTE(review): several lines (target decl,
 * else branches, label placement, gen_set_label calls) are elided. */
3588 static always_inline void gen_bcond (DisasContext *ctx, int type)
3590 uint32_t bo = BO(ctx->opcode);
3591 int l1 = gen_new_label();
3594 ctx->exception = POWERPC_EXCP_BRANCH;
3595 if (type == BCOND_LR || type == BCOND_CTR) {
3596 target = tcg_temp_local_new();
3597 if (type == BCOND_CTR)
3598 tcg_gen_mov_tl(target, cpu_ctr);
3600 tcg_gen_mov_tl(target, cpu_lr);
3602 if (LK(ctx->opcode))
3603 gen_setlr(ctx, ctx->nip);
3604 l1 = gen_new_label();
3605 if ((bo & 0x4) == 0) {
3606 /* Decrement and test CTR */
3607 TCGv temp = tcg_temp_new();
3608 if (unlikely(type == BCOND_CTR)) {
/* bcctr with CTR-decrement is an invalid form */
3609 GEN_EXCP_INVAL(ctx);
3612 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3613 #if defined(TARGET_PPC64)
3615 tcg_gen_ext32u_tl(temp, cpu_ctr);
3618 tcg_gen_mov_tl(temp, cpu_ctr);
3620 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3622 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3624 tcg_temp_free(temp);
3626 if ((bo & 0x10) == 0) {
3628 uint32_t bi = BI(ctx->opcode);
3629 uint32_t mask = 1 << (3 - (bi & 0x03));
3630 TCGv_i32 temp = tcg_temp_new_i32();
3633 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3634 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3636 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3637 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3639 tcg_temp_free_i32(temp);
3641 if (type == BCOND_IM) {
3642 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3643 if (likely(AA(ctx->opcode) == 0)) {
3644 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3646 gen_goto_tb(ctx, 0, li);
3649 gen_goto_tb(ctx, 1, ctx->nip);
3651 #if defined(TARGET_PPC64)
3652 if (!(ctx->sf_mode))
3653 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3656 tcg_gen_andi_tl(cpu_nip, target, ~3);
3659 #if defined(TARGET_PPC64)
3660 if (!(ctx->sf_mode))
3661 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3664 tcg_gen_movi_tl(cpu_nip, ctx->nip);
/* Thin handlers dispatching into gen_bcond with the branch kind. */
3669 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3671 gen_bcond(ctx, BCOND_IM);
3674 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3676 gen_bcond(ctx, BCOND_CTR);
3679 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3681 gen_bcond(ctx, BCOND_LR);
3684 /*** Condition register logical ***/
/* Template for the eight CR-bit logical instructions (crand, cror, ...).
 * It extracts bit crbA and bit crbB from their 4-bit CR fields by shifting
 * (sh > 0: shift right, sh < 0: shift left, 0: plain copy), applies tcg_op
 * to the two aligned bits, then masks the result back into bit position
 * crbD of its CR field, preserving the other three bits of that field.
 * NOTE(review): several continuation lines of this macro are elided in
 * this extract (declarations of sh/t0/t1/bitmask, the sign tests around
 * the shift variants, and the closing brace) — confirm against upstream. */
3685 #define GEN_CRLOGIC(name, tcg_op, opc) \
3686 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3691 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3692 t0 = tcg_temp_new_i32(); \
3694 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3696 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3698 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3699 t1 = tcg_temp_new_i32(); \
3700 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3702 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3704 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3706 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3707 tcg_op(t0, t0, t1); \
3708 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3709 tcg_gen_andi_i32(t0, t0, bitmask); \
3710 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3711 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3712 tcg_temp_free_i32(t0); \
3713 tcg_temp_free_i32(t1); \
3717 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3719 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3721 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3723 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3725 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3727 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3729 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3731 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
/* mcrf: copy whole CR field crfS into crfD */
3733 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3735 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3738 /*** System linkage ***/
3739 /* rfi (supervisor only) */
/* rfi: return from interrupt — restores MSR/NIP from the save/restore
 * registers. Privileged: in user-only builds, or when not in supervisor
 * state, it raises a privileged-opcode program exception instead. */
3740 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3742 #if defined(CONFIG_USER_ONLY)
3743 GEN_EXCP_PRIVOPC(ctx);
3745 /* Restore CPU state */
3746 if (unlikely(!ctx->supervisor)) {
3747 GEN_EXCP_PRIVOPC(ctx);
/* rfid: 64-bit form of return-from-interrupt (PPC_64B only) */
3755 #if defined(TARGET_PPC64)
3756 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3758 #if defined(CONFIG_USER_ONLY)
3759 GEN_EXCP_PRIVOPC(ctx);
3761 /* Restore CPU state */
3762 if (unlikely(!ctx->supervisor)) {
3763 GEN_EXCP_PRIVOPC(ctx);
/* hrfid: hypervisor return-from-interrupt; requires hypervisor
 * privilege, i.e. ctx->supervisor > 1 */
3771 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3773 #if defined(CONFIG_USER_ONLY)
3774 GEN_EXCP_PRIVOPC(ctx);
3776 /* Restore CPU state */
3777 if (unlikely(ctx->supervisor <= 1)) {
3778 GEN_EXCP_PRIVOPC(ctx);
/* sc: system call. In user-only emulation the exception is routed to
 * the QEMU syscall handler; otherwise it raises the architectural
 * system-call exception. "lev" (bits 5..11) selects the call level. */
3788 #if defined(CONFIG_USER_ONLY)
3789 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3791 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3793 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3797 lev = (ctx->opcode >> 5) & 0x7F;
3798 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
/* tw: trap word — helper compares rA with rB under the TO condition
 * bits and raises a trap exception when the condition holds. NIP is
 * updated first so a trap reports the correct address. */
3803 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3805 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3806 /* Update the nip since this might generate a trap exception */
3807 gen_update_nip(ctx, ctx->nip);
3808 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3809 tcg_temp_free_i32(t0);
/* twi: trap word immediate — same as tw with a sign-extended SIMM
 * operand in place of rB */
3813 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3815 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3816 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3817 /* Update the nip since this might generate a trap exception */
3818 gen_update_nip(ctx, ctx->nip);
3819 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3821 tcg_temp_free_i32(t1);
/* td: trap doubleword (64-bit comparison variant of tw) */
3824 #if defined(TARGET_PPC64)
3826 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3828 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3829 /* Update the nip since this might generate a trap exception */
3830 gen_update_nip(ctx, ctx->nip);
3831 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3832 tcg_temp_free_i32(t0);
/* tdi: trap doubleword immediate */
3836 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3838 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3839 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3840 /* Update the nip since this might generate a trap exception */
3841 gen_update_nip(ctx, ctx->nip);
3842 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3844 tcg_temp_free_i32(t1);
3848 /*** Processor control ***/
/* mcrxr: move XER[SO,OV,CA] into CR field crfD, then clear those three
 * bits in XER. The shift by XER_CA aligns the flag group at bit 0;
 * an elided line presumably masks the field to 4 bits. */
3850 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3852 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3853 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3854 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
/* mfcr / mfocrf: read the whole CR into rD, with a fast path when the
 * one-field (mfocrf) form names a single CR field.
 * NOTE(review): (crm ^ (crm - 1)) == 0 can never be true — xor with
 * crm-1 always yields a value with bit 0 set — so the fast path looks
 * dead and the helper is always used. Likely intended
 * (crm & (crm - 1)) == 0 (single-bit test); confirm against upstream. */
3858 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3862 if (likely(ctx->opcode & 0x00100000)) {
3863 crm = CRM(ctx->opcode);
3864 if (likely((crm ^ (crm - 1)) == 0)) {
3866 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3869 gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]);
/* mfmsr: read the MSR into rD (supervisor only) */
3874 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3876 #if defined(CONFIG_USER_ONLY)
3877 GEN_EXCP_PRIVREG(ctx);
3879 if (unlikely(!ctx->supervisor)) {
3880 GEN_EXCP_PRIVREG(ctx);
3883 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3888 #define SPR_NOACCESS ((void *)(-1UL))
/* Debug stub used when SPR access debugging is enabled: prints the
 * SPR number (with its two 5-bit halves swapped back to the
 * architectural encoding) of a forbidden access. */
3890 static void spr_noaccess (void *opaque, int sprn)
3892 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3893 printf("ERROR: try to access SPR %d !\n", sprn);
3895 #define SPR_NOACCESS (&spr_noaccess)
/* Common mfspr/mftb path: select the read callback matching the current
 * privilege level (hypervisor / supervisor / user), call it (the
 * callback leaves its result in cpu_T[0]) and copy that into rD.
 * A NULL callback means the SPR does not exist (invalid-SPR program
 * exception); SPR_NOACCESS means it exists but is privileged
 * (privileged-register exception). Reading PVR from user mode is
 * tolerated with only a log message, as a workaround for Linux guests
 * that read it from userland. */
3899 static always_inline void gen_op_mfspr (DisasContext *ctx)
3901 void (*read_cb)(void *opaque, int sprn);
3902 uint32_t sprn = SPR(ctx->opcode);
3904 #if !defined(CONFIG_USER_ONLY)
3905 if (ctx->supervisor == 2)
3906 read_cb = ctx->spr_cb[sprn].hea_read;
3907 else if (ctx->supervisor)
3908 read_cb = ctx->spr_cb[sprn].oea_read;
3911 read_cb = ctx->spr_cb[sprn].uea_read;
3912 if (likely(read_cb != NULL)) {
3913 if (likely(read_cb != SPR_NOACCESS)) {
3914 (*read_cb)(ctx, sprn);
3915 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3917 /* Privilege exception */
3918 /* This is a hack to avoid warnings when running Linux:
3919 * this OS breaks the PowerPC virtualisation model,
3920 * allowing userland application to read the PVR
3922 if (sprn != SPR_PVR) {
3923 if (loglevel != 0) {
3924 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3925 ADDRX "\n", sprn, sprn, ctx->nip);
3927 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3928 sprn, sprn, ctx->nip);
3930 GEN_EXCP_PRIVREG(ctx);
3934 if (loglevel != 0) {
3935 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3936 ADDRX "\n", sprn, sprn, ctx->nip);
3938 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3939 sprn, sprn, ctx->nip);
3940 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3941 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
/* mfspr and mftb both funnel through gen_op_mfspr above
 * (bodies elided in this extract) */
3945 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3951 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
/* mtcrf / mtocrf: write rS into the CR fields selected by CRM.
 * Fast path updates a single CR field in-line; otherwise the helper
 * spreads rS across all selected fields.
 * NOTE(review): (crm ^ (crm - 1)) == 0 can never hold (the xor always
 * has bit 0 set), so the fast path is taken whenever opcode bit 20 is
 * set, regardless of CRM. Likely intended (crm & (crm - 1)) == 0;
 * confirm against upstream before changing. */
3957 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3961 crm = CRM(ctx->opcode);
3962 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3963 TCGv_i32 temp = tcg_temp_new_i32();
3965 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3966 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3967 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3968 tcg_temp_free_i32(temp);
3970 TCGv_i32 temp = tcg_const_i32(crm);
3971 gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp);
3972 tcg_temp_free_i32(temp);
/* mtmsrd: write rS into the MSR (64-bit form, supervisor only).
 * With the L bit (opcode & 0x00010000) set, only MSR[EE] and MSR[RI]
 * are updated in-line and no synchronisation is needed; otherwise the
 * full store goes through the helper and translation stops, since the
 * machine state may have changed. */
3977 #if defined(TARGET_PPC64)
3978 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3980 #if defined(CONFIG_USER_ONLY)
3981 GEN_EXCP_PRIVREG(ctx);
3983 if (unlikely(!ctx->supervisor)) {
3984 GEN_EXCP_PRIVREG(ctx);
3987 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3988 if (ctx->opcode & 0x00010000) {
3989 /* Special form that does not need any synchronisation */
3990 TCGv t0 = tcg_temp_new();
3991 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3992 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3993 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3996 /* XXX: we need to update nip before the store
3997 * if we enter power saving mode, we will exit the loop
3998 * directly from ppc_store_msr
4000 gen_update_nip(ctx, ctx->nip);
4001 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
4002 /* Must stop the translation as machine state (may have) changed */
4003 /* Note that mtmsr is not always defined as context-synchronizing */
4004 ctx->exception = POWERPC_EXCP_STOP;
/* mtmsr: 32-bit MSR write (supervisor only). Same L-bit fast path as
 * mtmsrd; on a 64-bit target in 32-bit mode only the low 32 bits of
 * rS are written, the upper MSR bits are preserved. */
4010 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
4012 #if defined(CONFIG_USER_ONLY)
4013 GEN_EXCP_PRIVREG(ctx);
4015 if (unlikely(!ctx->supervisor)) {
4016 GEN_EXCP_PRIVREG(ctx);
4019 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4020 if (ctx->opcode & 0x00010000) {
4021 /* Special form that does not need any synchronisation */
4022 TCGv t0 = tcg_temp_new();
4023 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4024 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4025 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4028 /* XXX: we need to update nip before the store
4029 * if we enter power saving mode, we will exit the loop
4030 * directly from ppc_store_msr
4032 gen_update_nip(ctx, ctx->nip);
4033 #if defined(TARGET_PPC64)
4034 if (!ctx->sf_mode) {
4035 TCGv t0 = tcg_temp_new();
4036 TCGv t1 = tcg_temp_new();
4037 tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
4038 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
4039 tcg_gen_or_tl(t0, t0, t1);
4041 gen_helper_store_msr(t0);
4045 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
4046 /* Must stop the translation as machine state (may have) changed */
4047 /* Note that mtmsr is not always defined as context-synchronizing */
4048 ctx->exception = POWERPC_EXCP_STOP;
/* mtspr: write rS to an SPR. Mirrors gen_op_mfspr: pick the write
 * callback for the current privilege level, pass the value via
 * cpu_T[0]; NULL callback -> invalid-SPR program exception,
 * SPR_NOACCESS -> privileged-register exception. */
4054 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
4056 void (*write_cb)(void *opaque, int sprn);
4057 uint32_t sprn = SPR(ctx->opcode);
4059 #if !defined(CONFIG_USER_ONLY)
4060 if (ctx->supervisor == 2)
4061 write_cb = ctx->spr_cb[sprn].hea_write;
4062 else if (ctx->supervisor)
4063 write_cb = ctx->spr_cb[sprn].oea_write;
4066 write_cb = ctx->spr_cb[sprn].uea_write;
4067 if (likely(write_cb != NULL)) {
4068 if (likely(write_cb != SPR_NOACCESS)) {
4069 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4070 (*write_cb)(ctx, sprn);
4072 /* Privilege exception */
4073 if (loglevel != 0) {
4074 fprintf(logfile, "Trying to write privileged spr %d %03x at "
4075 ADDRX "\n", sprn, sprn, ctx->nip);
4077 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
4078 sprn, sprn, ctx->nip);
4079 GEN_EXCP_PRIVREG(ctx);
4083 if (loglevel != 0) {
4084 fprintf(logfile, "Trying to write invalid spr %d %03x at "
4085 ADDRX "\n", sprn, sprn, ctx->nip);
4087 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
4088 sprn, sprn, ctx->nip);
4089 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
4090 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
4094 /*** Cache management ***/
/* dcbf: data cache block flush — no cache is modelled, so it is
 * emulated as a dummy byte load to trigger the MMU checks/faults a
 * real access would raise. */
4096 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
4098 /* XXX: specification says this is treated as a load by the MMU */
4099 TCGv t0 = tcg_temp_new();
4100 gen_set_access_type(ACCESS_CACHE);
4101 gen_addr_reg_index(t0, ctx);
4102 gen_qemu_ld8u(t0, t0, ctx->mem_idx);
4106 /* dcbi (Supervisor only) */
/* dcbi: data cache block invalidate — emulated as a byte load+store
 * so that store-side MMU permissions are also checked. */
4107 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
4109 #if defined(CONFIG_USER_ONLY)
4110 GEN_EXCP_PRIVOPC(ctx);
4113 if (unlikely(!ctx->supervisor)) {
4114 GEN_EXCP_PRIVOPC(ctx);
4117 EA = tcg_temp_new();
4118 gen_set_access_type(ACCESS_CACHE);
4119 gen_addr_reg_index(EA, ctx);
4120 val = tcg_temp_new();
4121 /* XXX: specification says this should be treated as a store by the MMU */
4122 gen_qemu_ld8u(val, EA, ctx->mem_idx);
4123 gen_qemu_st8(val, EA, ctx->mem_idx);
/* dcbst: data cache block store — dummy load, as for dcbf */
4130 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
4132 /* XXX: specification say this is treated as a load by the MMU */
4133 TCGv t0 = tcg_temp_new();
4134 gen_set_access_type(ACCESS_CACHE);
4135 gen_addr_reg_index(t0, ctx);
4136 gen_qemu_ld8u(t0, t0, ctx->mem_idx);
/* dcbt: data cache block touch — pure hint, no-op here */
4141 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
4143 /* interpreted as no-op */
4144 /* XXX: specification say this is treated as a load by the MMU
4145 * but does not generate any exception
/* dcbtst: data cache block touch for store — hint, no-op */
4150 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
4152 /* interpreted as no-op */
4153 /* XXX: specification say this is treated as a load by the MMU
4154 * but does not generate any exception
/* dcbz: zero a full cache block; done in a helper, which may fault,
 * so NIP is rolled back to this instruction first. */
4159 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
4161 TCGv t0 = tcg_temp_new();
4162 gen_addr_reg_index(t0, ctx);
4163 /* NIP cannot be restored if the memory exception comes from an helper */
4164 gen_update_nip(ctx, ctx->nip - 4);
4165 gen_helper_dcbz(t0);
/* dcbz on PowerPC 970: opcode bit 10 selects 32- vs 128-byte block */
4169 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
4171 TCGv t0 = tcg_temp_new();
4172 gen_addr_reg_index(t0, ctx);
4173 /* NIP cannot be restored if the memory exception comes from an helper */
4174 gen_update_nip(ctx, ctx->nip - 4);
4175 if (ctx->opcode & 0x00200000)
4176 gen_helper_dcbz(t0);
4178 gen_helper_dcbz_970(t0);
/* icbi: instruction cache block invalidate — helper flushes any TBs
 * translated from that block */
4183 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
4185 TCGv t0 = tcg_temp_new();
4186 /* NIP cannot be restored if the memory exception comes from an helper */
4187 gen_update_nip(ctx, ctx->nip - 4);
4188 gen_addr_reg_index(t0, ctx);
4189 gen_helper_icbi(t0);
/* dcba: data cache block allocate — optional, treated as a hint */
4195 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
4197 /* interpreted as no-op */
4198 /* XXX: specification say this is treated as a store by the MMU
4199 * but does not generate any exception
4203 /*** Segment register manipulation ***/
4204 /* Supervisor only: */
/* mfsr: read segment register SR (immediate field) into rD */
4206 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
4208 #if defined(CONFIG_USER_ONLY)
4209 GEN_EXCP_PRIVREG(ctx);
4212 if (unlikely(!ctx->supervisor)) {
4213 GEN_EXCP_PRIVREG(ctx);
4216 t0 = tcg_const_tl(SR(ctx->opcode));
4217 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
/* mfsrin: read segment register indexed by rB[0:3] (top nibble) */
4223 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
4225 #if defined(CONFIG_USER_ONLY)
4226 GEN_EXCP_PRIVREG(ctx);
4229 if (unlikely(!ctx->supervisor)) {
4230 GEN_EXCP_PRIVREG(ctx);
4233 t0 = tcg_temp_new();
4234 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4235 tcg_gen_andi_tl(t0, t0, 0xF);
4236 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
/* mtsr: write rS into segment register SR (immediate field) */
4242 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
4244 #if defined(CONFIG_USER_ONLY)
4245 GEN_EXCP_PRIVREG(ctx);
4248 if (unlikely(!ctx->supervisor)) {
4249 GEN_EXCP_PRIVREG(ctx);
4252 t0 = tcg_const_tl(SR(ctx->opcode));
4253 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
/* mtsrin: write to segment register indexed by rB[0:3].
 * (rD here reads the same opcode bits as rS, so the value stored is
 * the architectural rS.) */
4259 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
4261 #if defined(CONFIG_USER_ONLY)
4262 GEN_EXCP_PRIVREG(ctx);
4265 if (unlikely(!ctx->supervisor)) {
4266 GEN_EXCP_PRIVREG(ctx);
4269 t0 = tcg_temp_new();
4270 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4271 tcg_gen_andi_tl(t0, t0, 0xF);
4272 gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
/* 64-bit "bridge" variants: same decode, but segment registers are
 * emulated on top of the SLB */
4277 #if defined(TARGET_PPC64)
4278 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4280 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
4282 #if defined(CONFIG_USER_ONLY)
4283 GEN_EXCP_PRIVREG(ctx);
4286 if (unlikely(!ctx->supervisor)) {
4287 GEN_EXCP_PRIVREG(ctx);
4290 t0 = tcg_const_tl(SR(ctx->opcode));
4291 gen_helper_load_slb(cpu_gpr[rD(ctx->opcode)], t0);
4297 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
4300 #if defined(CONFIG_USER_ONLY)
4301 GEN_EXCP_PRIVREG(ctx);
4304 if (unlikely(!ctx->supervisor)) {
4305 GEN_EXCP_PRIVREG(ctx);
4308 t0 = tcg_temp_new();
4309 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4310 tcg_gen_andi_tl(t0, t0, 0xF);
4311 gen_helper_load_slb(cpu_gpr[rD(ctx->opcode)], t0);
4317 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
4319 #if defined(CONFIG_USER_ONLY)
4320 GEN_EXCP_PRIVREG(ctx);
4323 if (unlikely(!ctx->supervisor)) {
4324 GEN_EXCP_PRIVREG(ctx);
4327 t0 = tcg_const_tl(SR(ctx->opcode));
4328 gen_helper_store_slb(t0, cpu_gpr[rS(ctx->opcode)]);
4334 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
4337 #if defined(CONFIG_USER_ONLY)
4338 GEN_EXCP_PRIVREG(ctx);
4341 if (unlikely(!ctx->supervisor)) {
4342 GEN_EXCP_PRIVREG(ctx);
4345 t0 = tcg_temp_new();
4346 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4347 tcg_gen_andi_tl(t0, t0, 0xF);
4348 gen_helper_store_slb(t0, cpu_gpr[rS(ctx->opcode)]);
4354 /*** Lookaside buffer management ***/
4355 /* Optional & supervisor only: */
4357 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
4359 #if defined(CONFIG_USER_ONLY)
4360 GEN_EXCP_PRIVOPC(ctx);
4362 if (unlikely(!ctx->supervisor)) {
4363 GEN_EXCP_PRIVOPC(ctx);
4371 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
4373 #if defined(CONFIG_USER_ONLY)
4374 GEN_EXCP_PRIVOPC(ctx);
4376 if (unlikely(!ctx->supervisor)) {
4377 GEN_EXCP_PRIVOPC(ctx);
4380 #if defined(TARGET_PPC64)
4381 if (!ctx->sf_mode) {
4382 TCGv t0 = tcg_temp_new();
4383 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4384 gen_helper_tlbie(t0);
4388 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4393 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
4395 #if defined(CONFIG_USER_ONLY)
4396 GEN_EXCP_PRIVOPC(ctx);
4398 if (unlikely(!ctx->supervisor)) {
4399 GEN_EXCP_PRIVOPC(ctx);
4402 /* This has no effect: it should ensure that all previous
4403 * tlbie have completed
4409 #if defined(TARGET_PPC64)
4411 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
4413 #if defined(CONFIG_USER_ONLY)
4414 GEN_EXCP_PRIVOPC(ctx);
4416 if (unlikely(!ctx->supervisor)) {
4417 GEN_EXCP_PRIVOPC(ctx);
4425 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
4427 #if defined(CONFIG_USER_ONLY)
4428 GEN_EXCP_PRIVOPC(ctx);
4430 if (unlikely(!ctx->supervisor)) {
4431 GEN_EXCP_PRIVOPC(ctx);
4434 gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
4439 /*** External control ***/
/* eciwx: external control in word indexed — word load from (rA|0)+rB,
 * alignment-checked; EAR[E] enable bit is not checked (see XXX) */
4442 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
4444 /* Should check EAR[E] ! */
4445 TCGv t0 = tcg_temp_new();
4446 gen_set_access_type(ACCESS_RES);
4447 gen_addr_reg_index(t0, ctx);
4448 gen_check_align(ctx, t0, 0x03);
4449 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
/* ecowx: external control out word indexed — matching word store.
 * (rD reads the same opcode bits as rS, so this stores rS.) */
4454 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
4456 /* Should check EAR[E] ! */
4457 TCGv t0 = tcg_temp_new();
4458 gen_set_access_type(ACCESS_RES);
4459 gen_addr_reg_index(t0, ctx);
4460 gen_check_align(ctx, t0, 0x03);
4461 gen_qemu_st32(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
4465 /* PowerPC 601 specific instructions */
/* abs: rD = |rA| (601/POWER legacy) */
4467 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
4469 int l1 = gen_new_label();
4470 int l2 = gen_new_label();
4471 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4472 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4475 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4477 if (unlikely(Rc(ctx->opcode) != 0))
4478 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* abso: abs with overflow — OV/SO set only for rA == INT32_MIN.
 * NOTE(review): the brcondi compares target_long against the positive
 * constant 0x80000000; this relies on the 601 being a 32-bit-only
 * target — confirm. */
4482 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
4484 int l1 = gen_new_label();
4485 int l2 = gen_new_label();
4486 int l3 = gen_new_label();
4487 /* Start with XER OV disabled, the most likely case */
4488 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4489 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4490 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4491 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4494 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4497 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4499 if (unlikely(Rc(ctx->opcode) != 0))
4500 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* clcs: cache line compute size (601) — done in a helper */
4504 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
4506 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4507 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4508 tcg_temp_free_i32(t0);
4509 /* Rc=1 sets CR0 to an undefined state */
/* div/divo/divs/divso: POWER-style divide using MQ, via helpers
 * (o-forms additionally update XER overflow) */
4513 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
4515 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4516 if (unlikely(Rc(ctx->opcode) != 0))
4517 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4521 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
4523 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4524 if (unlikely(Rc(ctx->opcode) != 0))
4525 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4529 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
4531 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4532 if (unlikely(Rc(ctx->opcode) != 0))
4533 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4536 /* divso - divso. */
4537 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
4539 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4540 if (unlikely(Rc(ctx->opcode) != 0))
4541 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* doz: difference or zero — rD = (rB > rA) ? rB - rA : 0 */
4545 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
4547 int l1 = gen_new_label();
4548 int l2 = gen_new_label();
4549 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4550 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4553 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4555 if (unlikely(Rc(ctx->opcode) != 0))
4556 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* dozo: doz with overflow detection on the subtraction
 * (signed-overflow test via (rB^rA) & ~(rA^result)) */
4560 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
4562 int l1 = gen_new_label();
4563 int l2 = gen_new_label();
4564 TCGv t0 = tcg_temp_new();
4565 TCGv t1 = tcg_temp_new();
4566 TCGv t2 = tcg_temp_new();
4567 /* Start with XER OV disabled, the most likely case */
4568 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4569 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4570 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4571 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4572 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4573 tcg_gen_andc_tl(t1, t1, t2);
4574 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4575 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4576 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4579 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4584 if (unlikely(Rc(ctx->opcode) != 0))
4585 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* dozi: doz with immediate — rD = (rA < SIMM) ? SIMM - rA : 0 */
4589 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4591 target_long simm = SIMM(ctx->opcode);
4592 int l1 = gen_new_label();
4593 int l2 = gen_new_label();
4594 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4595 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4598 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4600 if (unlikely(Rc(ctx->opcode) != 0))
4601 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4604 /* lscbx - lscbx. */
/* lscbx: load string and compare byte indexed (601) — helper copies
 * bytes until the XER-specified count or a match; the byte count comes
 * back in t0 and replaces XER[0:6]. Rc sets CR0 from the count. */
4605 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4607 TCGv t0 = tcg_temp_new();
4608 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4609 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4610 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4612 gen_addr_reg_index(t0, ctx);
4613 /* NIP cannot be restored if the memory exception comes from an helper */
4614 gen_update_nip(ctx, ctx->nip - 4);
4615 gen_helper_lscbx(t0, t0, t1, t2, t3);
4616 tcg_temp_free_i32(t1);
4617 tcg_temp_free_i32(t2);
4618 tcg_temp_free_i32(t3);
4619 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4620 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4621 if (unlikely(Rc(ctx->opcode) != 0))
4622 gen_set_Rc0(ctx, t0);
4626 /* maskg - maskg. */
/* maskg: generate mask from bit rS to bit rB (601); the xor of the two
 * single-sided masks gives the range, negated when start > stop */
4627 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4629 int l1 = gen_new_label();
4630 TCGv t0 = tcg_temp_new();
4631 TCGv t1 = tcg_temp_new();
4632 TCGv t2 = tcg_temp_new();
4633 TCGv t3 = tcg_temp_new();
4634 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4635 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4636 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4637 tcg_gen_addi_tl(t2, t0, 1);
4638 tcg_gen_shr_tl(t2, t3, t2);
4639 tcg_gen_shr_tl(t3, t3, t1);
4640 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4641 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4642 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4648 if (unlikely(Rc(ctx->opcode) != 0))
4649 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4652 /* maskir - maskir. */
/* maskir: insert rS into rA under mask rB:
 * rA = (rS & rB) | (rA & ~rB) */
4653 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4655 TCGv t0 = tcg_temp_new();
4656 TCGv t1 = tcg_temp_new();
4657 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4658 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4659 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4662 if (unlikely(Rc(ctx->opcode) != 0))
4663 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* mul: POWER multiply — 32x32->64 product; low half goes to the MQ
 * SPR, high half to rD */
4667 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4669 TCGv_i64 t0 = tcg_temp_new_i64();
4670 TCGv_i64 t1 = tcg_temp_new_i64();
4671 TCGv t2 = tcg_temp_new();
4672 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4673 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4674 tcg_gen_mul_i64(t0, t0, t1);
4675 tcg_gen_trunc_i64_tl(t2, t0);
4676 gen_store_spr(SPR_MQ, t2);
4677 tcg_gen_shri_i64(t1, t0, 32);
4678 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4679 tcg_temp_free_i64(t0);
4680 tcg_temp_free_i64(t1);
4682 if (unlikely(Rc(ctx->opcode) != 0))
4683 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* mulo: as mul, but sets XER[OV,SO] when the 64-bit product does not
 * fit in 32 signed bits (compared against its own sign-extension) */
4687 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4689 int l1 = gen_new_label();
4690 TCGv_i64 t0 = tcg_temp_new_i64();
4691 TCGv_i64 t1 = tcg_temp_new_i64();
4692 TCGv t2 = tcg_temp_new();
4693 /* Start with XER OV disabled, the most likely case */
4694 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4695 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4696 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4697 tcg_gen_mul_i64(t0, t0, t1);
4698 tcg_gen_trunc_i64_tl(t2, t0);
4699 gen_store_spr(SPR_MQ, t2);
4700 tcg_gen_shri_i64(t1, t0, 32);
4701 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4702 tcg_gen_ext32s_i64(t1, t0);
4703 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4704 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4706 tcg_temp_free_i64(t0);
4707 tcg_temp_free_i64(t1);
4709 if (unlikely(Rc(ctx->opcode) != 0))
4710 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* nabs: rD = -|rA| (negative absolute value) */
4714 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4716 int l1 = gen_new_label();
4717 int l2 = gen_new_label();
4718 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4719 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4722 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4724 if (unlikely(Rc(ctx->opcode) != 0))
4725 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4728 /* nabso - nabso. */
/* nabso: o-form of nabs; nabs can never overflow, so OV is simply
 * cleared */
4729 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4731 int l1 = gen_new_label();
4732 int l2 = gen_new_label();
4733 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4734 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4737 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4739 /* nabs never overflows */
4740 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4741 if (unlikely(Rc(ctx->opcode) != 0))
4742 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* rlmi: rotate left then mask insert — rotate rS by rB[27:31], keep
 * bits under MASK(mb,me), preserve rA elsewhere */
4746 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4748 uint32_t mb = MB(ctx->opcode);
4749 uint32_t me = ME(ctx->opcode);
4750 TCGv t0 = tcg_temp_new();
4751 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4752 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4753 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4754 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4755 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4757 if (unlikely(Rc(ctx->opcode) != 0))
4758 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* rrib: rotate right and insert bit — insert bit 0 of rS at position
 * rB into rA */
4762 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4764 TCGv t0 = tcg_temp_new();
4765 TCGv t1 = tcg_temp_new();
4766 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4767 tcg_gen_movi_tl(t1, 0x80000000);
4768 tcg_gen_shr_tl(t1, t1, t0);
4769 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4770 tcg_gen_and_tl(t0, t0, t1);
4771 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4772 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4775 if (unlikely(Rc(ctx->opcode) != 0))
4776 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sle: shift left extended — rA = rS << n; MQ gets the 32-bit
 * rotation of rS by n */
4780 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4782 TCGv t0 = tcg_temp_new();
4783 TCGv t1 = tcg_temp_new();
4784 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4785 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4786 tcg_gen_subfi_tl(t1, 32, t1);
4787 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4788 tcg_gen_or_tl(t1, t0, t1);
4789 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4790 gen_store_spr(SPR_MQ, t1);
4793 if (unlikely(Rc(ctx->opcode) != 0))
4794 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sleq: shift left extended with MQ — rotate rS, merge with previous
 * MQ under the shift mask, store rotation back to MQ */
4798 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4800 TCGv t0 = tcg_temp_new();
4801 TCGv t1 = tcg_temp_new();
4802 TCGv t2 = tcg_temp_new();
4803 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4804 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4805 tcg_gen_shl_tl(t2, t2, t0);
4806 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4807 gen_load_spr(t1, SPR_MQ);
4808 gen_store_spr(SPR_MQ, t0);
4809 tcg_gen_and_tl(t0, t0, t2);
4810 tcg_gen_andc_tl(t1, t1, t2);
4811 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4815 if (unlikely(Rc(ctx->opcode) != 0))
4816 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sliq: shift left immediate with MQ — rA = rS << sh, MQ = rot(rS, sh).
 * NOTE(review): when sh == 0 the shri by (32 - sh) shifts by 32, which
 * is out of range for a 32-bit TCG shift — confirm SH can never be 0
 * here or that the target tolerates it. */
4820 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4822 int sh = SH(ctx->opcode);
4823 TCGv t0 = tcg_temp_new();
4824 TCGv t1 = tcg_temp_new();
4825 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4826 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4827 tcg_gen_or_tl(t1, t0, t1);
4828 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4829 gen_store_spr(SPR_MQ, t1);
4832 if (unlikely(Rc(ctx->opcode) != 0))
4833 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4836 /* slliq - slliq. */
/* slliq: shift left long immediate with MQ — rotate rS by sh, merge
 * with old MQ below the shift boundary, new MQ = rotation */
4837 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4839 int sh = SH(ctx->opcode);
4840 TCGv t0 = tcg_temp_new();
4841 TCGv t1 = tcg_temp_new();
4842 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4843 gen_load_spr(t1, SPR_MQ);
4844 gen_store_spr(SPR_MQ, t0);
4845 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4846 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4847 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4850 if (unlikely(Rc(ctx->opcode) != 0))
4851 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sllq: shift left long with MQ — rB bit 5 selects between masking MQ
 * (shift amount >= 32) and merging the shifted rS with MQ */
4855 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4857 int l1 = gen_new_label();
4858 int l2 = gen_new_label();
4859 TCGv t0 = tcg_temp_local_new();
4860 TCGv t1 = tcg_temp_local_new();
4861 TCGv t2 = tcg_temp_local_new();
4862 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4863 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4864 tcg_gen_shl_tl(t1, t1, t2);
4865 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4866 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4867 gen_load_spr(t0, SPR_MQ);
4868 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4871 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4872 gen_load_spr(t2, SPR_MQ);
4873 tcg_gen_andc_tl(t1, t2, t1);
4874 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4879 if (unlikely(Rc(ctx->opcode) != 0))
4880 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* slq: shift left with MQ — MQ gets the rotation; rA is the shifted
 * value, forced to 0 when rB bit 5 requests a shift >= 32 */
4884 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4886 int l1 = gen_new_label();
4887 TCGv t0 = tcg_temp_new();
4888 TCGv t1 = tcg_temp_new();
4889 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4890 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4891 tcg_gen_subfi_tl(t1, 32, t1);
4892 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4893 tcg_gen_or_tl(t1, t0, t1);
4894 gen_store_spr(SPR_MQ, t1);
4895 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4896 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4897 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4898 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4902 if (unlikely(Rc(ctx->opcode) != 0))
4903 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/*
 * POWER bridge arithmetic-shift-right-with-MQ (sraiq, sraq).
 * Besides updating MQ with the rotated source, these set XER[CA] when a
 * negative value loses non-zero bits during the shift.
 * NOTE(review): label targets (gen_set_label) and temp frees are elided
 * from this excerpt.
 */
4906 /* sraiq - sraiq. */
4907 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4909 int sh = SH(ctx->opcode);
4910 int l1 = gen_new_label();
4911 TCGv t0 = tcg_temp_new();
4912 TCGv t1 = tcg_temp_new();
/* t0|t1 = rot(rS, sh); t1 holds the bits shifted out the bottom. */
4913 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4914 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4915 tcg_gen_or_tl(t0, t0, t1);
4916 gen_store_spr(SPR_MQ, t0);
/* CA starts clear; set only when rS < 0 and shifted-out bits != 0. */
4917 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4918 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4919 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4920 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4922 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4925 if (unlikely(Rc(ctx->opcode) != 0))
4926 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sraq - sraq.: variable-count form; rB bit 0x20 selects the >=32 path
 * where the result is the sign replication of rS (see 4947). */
4930 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4932 int l1 = gen_new_label();
4933 int l2 = gen_new_label();
4934 TCGv t0 = tcg_temp_new();
4935 TCGv t1 = tcg_temp_local_new();
4936 TCGv t2 = tcg_temp_local_new();
4937 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4938 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4939 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4940 tcg_gen_subfi_tl(t2, 32, t2);
4941 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4942 tcg_gen_or_tl(t0, t0, t2);
4943 gen_store_spr(SPR_MQ, t0);
4944 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4945 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
4946 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4947 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4950 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
/* CA set when result negative and some shifted-out bit non-zero. */
4951 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4952 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4953 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4954 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4958 if (unlikely(Rc(ctx->opcode) != 0))
4959 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/*
 * POWER bridge shift-right-extended family (sre, srea, sreq, sriq).
 * All save the 32-bit rotation of rS into MQ; they differ in how the
 * GPR result is formed (logical shift, arithmetic shift, MQ merge).
 * NOTE(review): braces and tcg_temp_free() lines are elided in this excerpt.
 */
4963 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4965 TCGv t0 = tcg_temp_new();
4966 TCGv t1 = tcg_temp_new();
4967 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4968 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4969 tcg_gen_subfi_tl(t1, 32, t1);
4970 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4971 tcg_gen_or_tl(t1, t0, t1);
/* rA gets the logical shift; MQ gets the full rotation. */
4972 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0)
4973 gen_store_spr(SPR_MQ, t1);
4976 if (unlikely(Rc(ctx->opcode) != 0))
4977 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srea - srea.: arithmetic variant using tcg rotate-right for MQ. */
4981 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4983 TCGv t0 = tcg_temp_new();
4984 TCGv t1 = tcg_temp_new();
4985 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4986 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4987 gen_store_spr(SPR_MQ, t0);
4988 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4991 if (unlikely(Rc(ctx->opcode) != 0))
4992 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sreq - sreq.: merge rotation with the previous MQ under a mask of
 * (32 - n) high zero bits; new MQ is the rotation itself. */
4996 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4998 TCGv t0 = tcg_temp_new();
4999 TCGv t1 = tcg_temp_new();
5000 TCGv t2 = tcg_temp_new();
5001 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5002 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5003 tcg_gen_shr_tl(t1, t1, t0);
5004 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5005 gen_load_spr(t2, SPR_MQ);
5006 gen_store_spr(SPR_MQ, t0);
5007 tcg_gen_and_tl(t0, t0, t1);
5008 tcg_gen_andc_tl(t2, t2, t1);
5009 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5013 if (unlikely(Rc(ctx->opcode) != 0))
5014 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sriq - sriq.: immediate-count shift right; MQ = rot(rS, 32-sh). */
5018 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
5020 int sh = SH(ctx->opcode);
5021 TCGv t0 = tcg_temp_new();
5022 TCGv t1 = tcg_temp_new();
5023 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5024 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5025 tcg_gen_or_tl(t1, t0, t1);
5026 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5027 gen_store_spr(SPR_MQ, t1);
5030 if (unlikely(Rc(ctx->opcode) != 0))
5031 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/*
 * POWER bridge shift-right-with-MQ (srliq, srlq, srq) — right-shift
 * mirrors of slliq/sllq/slq above; masks use >> instead of <<.
 * NOTE(review): gen_set_label() targets for l1/l2 and tcg_temp_free()
 * calls are elided in this excerpt.
 */
5035 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
5037 int sh = SH(ctx->opcode);
5038 TCGv t0 = tcg_temp_new();
5039 TCGv t1 = tcg_temp_new();
5040 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5041 gen_load_spr(t1, SPR_MQ);
5042 gen_store_spr(SPR_MQ, t0);
/* rA = low (32-sh) bits of the rotation merged with high sh bits of MQ. */
5043 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5044 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5045 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5048 if (unlikely(Rc(ctx->opcode) != 0))
5049 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srlq - srlq.: variable count; rB bit 0x20 selects MQ-masked path. */
5053 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
5055 int l1 = gen_new_label();
5056 int l2 = gen_new_label();
5057 TCGv t0 = tcg_temp_local_new();
5058 TCGv t1 = tcg_temp_local_new();
5059 TCGv t2 = tcg_temp_local_new();
5060 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5061 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5062 tcg_gen_shr_tl(t2, t1, t2);
5063 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5064 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5065 gen_load_spr(t0, SPR_MQ);
5066 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
/* Fall-through path (after the elided label l1). */
5069 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5070 tcg_gen_and_tl(t0, t0, t2);
5071 gen_load_spr(t1, SPR_MQ);
5072 tcg_gen_andc_tl(t1, t1, t2);
5073 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5078 if (unlikely(Rc(ctx->opcode) != 0))
5079 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srq - srq.: rA = rS >> n (or 0 when rB bit 0x20 set); MQ = rotation. */
5083 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
5085 int l1 = gen_new_label();
5086 TCGv t0 = tcg_temp_new();
5087 TCGv t1 = tcg_temp_new();
5088 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5089 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5090 tcg_gen_subfi_tl(t1, 32, t1);
5091 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5092 tcg_gen_or_tl(t1, t0, t1);
5093 gen_store_spr(SPR_MQ, t1);
5094 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5095 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5096 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5097 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5101 if (unlikely(Rc(ctx->opcode) != 0))
5102 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5105 /* PowerPC 602 specific instructions */
/* dsa/esa are not implemented: they raise an invalid-opcode exception. */
5107 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
5110 GEN_EXCP_INVAL(ctx);
5114 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
5117 GEN_EXCP_INVAL(ctx);
/* mfrom: supervisor-only; delegates to the 602 helper.
 * NOTE(review): the #else/#endif around the CONFIG_USER_ONLY split and
 * the closing brace of the supervisor check are elided in this excerpt
 * (the same pattern repeats for every privileged handler below). */
5121 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
5123 #if defined(CONFIG_USER_ONLY)
5124 GEN_EXCP_PRIVOPC(ctx);
5126 if (unlikely(!ctx->supervisor)) {
5127 GEN_EXCP_PRIVOPC(ctx);
5130 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5134 /* 602 - 603 - G2 TLB management */
5136 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
5138 #if defined(CONFIG_USER_ONLY)
5139 GEN_EXCP_PRIVOPC(ctx);
5141 if (unlikely(!ctx->supervisor)) {
5142 GEN_EXCP_PRIVOPC(ctx);
5145 gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5150 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
5152 #if defined(CONFIG_USER_ONLY)
5153 GEN_EXCP_PRIVOPC(ctx);
5155 if (unlikely(!ctx->supervisor)) {
5156 GEN_EXCP_PRIVOPC(ctx);
5159 gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5163 /* 74xx TLB management */
5165 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
5167 #if defined(CONFIG_USER_ONLY)
5168 GEN_EXCP_PRIVOPC(ctx);
5170 if (unlikely(!ctx->supervisor)) {
5171 GEN_EXCP_PRIVOPC(ctx);
5174 gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5179 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
5181 #if defined(CONFIG_USER_ONLY)
5182 GEN_EXCP_PRIVOPC(ctx);
5184 if (unlikely(!ctx->supervisor)) {
5185 GEN_EXCP_PRIVOPC(ctx);
5188 gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5192 /* POWER instructions not in PowerPC 601 */
5194 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
5196 /* Cache line flush: implemented as no-op */
5200 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
5202 /* Cache line invalidate: privileged and treated as no-op */
5203 #if defined(CONFIG_USER_ONLY)
5204 GEN_EXCP_PRIVOPC(ctx);
5206 if (unlikely(!ctx->supervisor)) {
5207 GEN_EXCP_PRIVOPC(ctx);
5214 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
5216 /* Data cache line store: treated as no-op */
/* mfsri: reads the segment register selected by EA[0:3] into rD, and
 * copies it to rA as well when rA is a distinct non-zero register. */
5219 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
5221 #if defined(CONFIG_USER_ONLY)
5222 GEN_EXCP_PRIVOPC(ctx);
5224 int ra = rA(ctx->opcode);
5225 int rd = rD(ctx->opcode);
5227 if (unlikely(!ctx->supervisor)) {
5228 GEN_EXCP_PRIVOPC(ctx);
5231 t0 = tcg_temp_new();
5232 gen_addr_reg_index(t0, ctx);
/* SR index = top 4 bits of the 32-bit effective address. */
5233 tcg_gen_shri_tl(t0, t0, 28);
5234 tcg_gen_andi_tl(t0, t0, 0xF);
5235 gen_helper_load_sr(cpu_gpr[rd], t0);
5237 if (ra != 0 && ra != rd)
5238 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
/* rac: real-address-compute via helper; supervisor only. */
5242 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
5244 #if defined(CONFIG_USER_ONLY)
5245 GEN_EXCP_PRIVOPC(ctx);
5248 if (unlikely(!ctx->supervisor)) {
5249 GEN_EXCP_PRIVOPC(ctx);
5252 t0 = tcg_temp_new();
5253 gen_addr_reg_index(t0, ctx);
5254 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5259 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
5261 #if defined(CONFIG_USER_ONLY)
5262 GEN_EXCP_PRIVOPC(ctx);
5264 if (unlikely(!ctx->supervisor)) {
5265 GEN_EXCP_PRIVOPC(ctx);
5273 /* svc is not implemented for now */
5275 /* POWER2 specific instructions */
5276 /* Quad manipulation (load/store two floats at a time) */
/* Each op moves FPR pair (rd, (rd+1) mod 32) to/from memory at EA and
 * EA+8; the "u" (update) forms write the base EA back into rA.
 * NOTE(review): braces, tcg_temp_free() calls and (for the update forms)
 * the rA==0 guard lines appear to be elided from this excerpt. */
5279 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5281 int rd = rD(ctx->opcode);
5282 TCGv t0 = tcg_temp_new();
5283 gen_addr_imm_index(t0, ctx, 0);
5284 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5285 tcg_gen_addi_tl(t0, t0, 8);
5286 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5291 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5293 int ra = rA(ctx->opcode);
5294 int rd = rD(ctx->opcode);
5295 TCGv t0 = tcg_temp_new();
5296 TCGv t1 = tcg_temp_new();
5297 gen_addr_imm_index(t0, ctx, 0);
5298 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5299 tcg_gen_addi_tl(t1, t0, 8);
5300 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5302 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5308 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
5310 int ra = rA(ctx->opcode);
5311 int rd = rD(ctx->opcode);
5312 TCGv t0 = tcg_temp_new();
5313 TCGv t1 = tcg_temp_new();
5314 gen_addr_reg_index(t0, ctx);
5315 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5316 tcg_gen_addi_tl(t1, t0, 8);
5317 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5319 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5325 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
5327 int rd = rD(ctx->opcode);
5328 TCGv t0 = tcg_temp_new();
5329 gen_addr_reg_index(t0, ctx);
5330 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5331 tcg_gen_addi_tl(t0, t0, 8);
5332 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
/* Store counterparts of the quad loads above. */
5337 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5339 int rd = rD(ctx->opcode);
5340 TCGv t0 = tcg_temp_new();
5341 gen_addr_imm_index(t0, ctx, 0);
5342 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5343 tcg_gen_addi_tl(t0, t0, 8);
5344 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5349 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5351 int ra = rA(ctx->opcode);
5352 int rd = rD(ctx->opcode);
5353 TCGv t0 = tcg_temp_new();
5354 TCGv t1 = tcg_temp_new();
5355 gen_addr_imm_index(t0, ctx, 0);
5356 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5357 tcg_gen_addi_tl(t1, t0, 8);
5358 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5360 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5366 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
5368 int ra = rA(ctx->opcode);
5369 int rd = rD(ctx->opcode);
5370 TCGv t0 = tcg_temp_new();
5371 TCGv t1 = tcg_temp_new();
5372 gen_addr_reg_index(t0, ctx);
5373 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5374 tcg_gen_addi_tl(t1, t0, 8);
5375 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5377 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5383 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
5385 int rd = rD(ctx->opcode);
5386 TCGv t0 = tcg_temp_new();
5387 gen_addr_reg_index(t0, ctx);
5388 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5389 tcg_gen_addi_tl(t0, t0, 8);
5390 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5394 /* BookE specific instructions */
5395 /* XXX: not implemented on 440 ? */
/* mfapidi is not implemented: raises an invalid-opcode exception. */
5396 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
5399 GEN_EXCP_INVAL(ctx);
5402 /* XXX: not implemented on 440 ? */
/* tlbiva: supervisor-only TLB invalidate by virtual address; EA is
 * truncated to 32 bits on 64-bit targets before calling the helper.
 * NOTE(review): the t0 declaration and the #else/#endif lines are
 * elided from this excerpt. */
5403 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
5405 #if defined(CONFIG_USER_ONLY)
5406 GEN_EXCP_PRIVOPC(ctx);
5409 if (unlikely(!ctx->supervisor)) {
5410 GEN_EXCP_PRIVOPC(ctx);
5413 gen_addr_reg_index(t0, ctx);
5414 #if defined(TARGET_PPC64)
5416 tcg_gen_ext32u_tl(t0, t0);
5418 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
5423 /* All 405 MAC instructions are translated here */
/*
 * Common translator for the PPC 405 multiply-accumulate family.
 * opc3 encodes operand halves/signedness (low nibble, switched on below)
 * and, in the bits masked off by & 0x0D, the accumulate / negate /
 * saturate / overflow variants.
 * NOTE(review): the `case` labels of the switch and the closing braces
 * are elided from this excerpt; only the per-case bodies are visible.
 */
5424 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
5426 int ra, int rb, int rt, int Rc)
5430 t0 = tcg_temp_local_new();
5431 t1 = tcg_temp_local_new();
5433 switch (opc3 & 0x0D) {
5435 /* macchw - macchw. - macchwo - macchwo. */
5436 /* macchws - macchws. - macchwso - macchwso. */
5437 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5438 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5439 /* mulchw - mulchw. */
/* cross halves, signed: low half of ra x high half of rb. */
5440 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5441 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5442 tcg_gen_ext16s_tl(t1, t1);
5445 /* macchwu - macchwu. - macchwuo - macchwuo. */
5446 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5447 /* mulchwu - mulchwu. */
/* cross halves, unsigned. */
5448 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5449 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5450 tcg_gen_ext16u_tl(t1, t1);
5453 /* machhw - machhw. - machhwo - machhwo. */
5454 /* machhws - machhws. - machhwso - machhwso. */
5455 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5456 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5457 /* mulhhw - mulhhw. */
/* high halves, signed. */
5458 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5459 tcg_gen_ext16s_tl(t0, t0);
5460 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5461 tcg_gen_ext16s_tl(t1, t1);
5464 /* machhwu - machhwu. - machhwuo - machhwuo. */
5465 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5466 /* mulhhwu - mulhhwu. */
/* high halves, unsigned. */
5467 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5468 tcg_gen_ext16u_tl(t0, t0);
5469 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5470 tcg_gen_ext16u_tl(t1, t1);
5473 /* maclhw - maclhw. - maclhwo - maclhwo. */
5474 /* maclhws - maclhws. - maclhwso - maclhwso. */
5475 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5476 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5477 /* mullhw - mullhw. */
/* low halves, signed. */
5478 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5479 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5482 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5483 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5484 /* mullhwu - mullhwu. */
/* low halves, unsigned. */
5485 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5486 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5490 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5491 tcg_gen_mul_tl(t1, t0, t1);
5493 /* nmultiply-and-accumulate (0x0E) */
5494 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5496 /* multiply-and-accumulate (0x0C) */
5497 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5501 /* Check overflow and/or saturate */
5502 int l1 = gen_new_label();
5505 /* Start with XER OV disabled, the most likely case */
5506 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
/* Signed overflow: operands had equal signs and result sign differs. */
5510 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5511 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5512 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5513 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
/* Saturate towards INT32_MAX/INT32_MIN depending on accumulator sign. */
5516 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5517 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5521 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5524 tcg_gen_movi_tl(t0, UINT32_MAX);
5528 /* Check overflow */
5529 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5532 tcg_gen_mov_tl(cpu_gpr[rt], t0);
/* Pure multiply path (no accumulate). */
5535 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
/* NOTE(review): `unlikely(Rc) != 0` is equivalent to `unlikely(Rc != 0)`
 * since unlikely() preserves truth value, but the placement is a wart. */
5539 if (unlikely(Rc) != 0) {
5541 gen_set_Rc0(ctx, cpu_gpr[rt]);
/* Stamps out one opcode handler per MAC variant (see list below). */
5545 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5546 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
5548 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5549 rD(ctx->opcode), Rc(ctx->opcode)); \
/* Instantiate every 405 MAC / negative-MAC / half-word multiply opcode
 * via GEN_MAC_HANDLER (all share gen_405_mulladd_insn above). */
5552 /* macchw - macchw. */
5553 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5554 /* macchwo - macchwo. */
5555 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5556 /* macchws - macchws. */
5557 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5558 /* macchwso - macchwso. */
5559 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5560 /* macchwsu - macchwsu. */
5561 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5562 /* macchwsuo - macchwsuo. */
5563 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5564 /* macchwu - macchwu. */
5565 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5566 /* macchwuo - macchwuo. */
5567 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5568 /* machhw - machhw. */
5569 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5570 /* machhwo - machhwo. */
5571 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5572 /* machhws - machhws. */
5573 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5574 /* machhwso - machhwso. */
5575 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5576 /* machhwsu - machhwsu. */
5577 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5578 /* machhwsuo - machhwsuo. */
5579 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5580 /* machhwu - machhwu. */
5581 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5582 /* machhwuo - machhwuo. */
5583 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5584 /* maclhw - maclhw. */
5585 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5586 /* maclhwo - maclhwo. */
5587 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5588 /* maclhws - maclhws. */
5589 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5590 /* maclhwso - maclhwso. */
5591 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5592 /* maclhwu - maclhwu. */
5593 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5594 /* maclhwuo - maclhwuo. */
5595 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5596 /* maclhwsu - maclhwsu. */
5597 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5598 /* maclhwsuo - maclhwsuo. */
5599 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5600 /* nmacchw - nmacchw. */
5601 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5602 /* nmacchwo - nmacchwo. */
5603 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5604 /* nmacchws - nmacchws. */
5605 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5606 /* nmacchwso - nmacchwso. */
5607 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5608 /* nmachhw - nmachhw. */
5609 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5610 /* nmachhwo - nmachhwo. */
5611 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5612 /* nmachhws - nmachhws. */
5613 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5614 /* nmachhwso - nmachhwso. */
5615 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5616 /* nmaclhw - nmaclhw. */
5617 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5618 /* nmaclhwo - nmaclhwo. */
5619 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5620 /* nmaclhws - nmaclhws. */
5621 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5622 /* nmaclhwso - nmaclhwso. */
5623 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5625 /* mulchw - mulchw. */
5626 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5627 /* mulchwu - mulchwu. */
5628 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5629 /* mulhhw - mulhhw. */
5630 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5631 /* mulhhwu - mulhhwu. */
5632 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5633 /* mullhw - mullhw. */
5634 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5635 /* mullhwu - mullhwu. */
5636 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
/*
 * Device Control Register (DCR) access. mfdcr/mtdcr use an immediate DCR
 * number (SPR field); the *x forms take the DCR number from rA; the *ux
 * forms (460) are the unprivileged variants.
 * NIP is rewound before each helper call so a faulting DCR access can
 * raise a precise exception (see gen_update_nip calls).
 * NOTE(review): dcrn declarations, #else/#endif lines and closing braces
 * are elided from this excerpt.
 */
5639 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
5641 #if defined(CONFIG_USER_ONLY)
5642 GEN_EXCP_PRIVREG(ctx);
5645 if (unlikely(!ctx->supervisor)) {
5646 GEN_EXCP_PRIVREG(ctx);
5649 /* NIP cannot be restored if the memory exception comes from an helper */
5650 gen_update_nip(ctx, ctx->nip - 4);
5651 dcrn = tcg_const_tl(SPR(ctx->opcode));
5652 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
5653 tcg_temp_free(dcrn);
5658 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
5660 #if defined(CONFIG_USER_ONLY)
5661 GEN_EXCP_PRIVREG(ctx);
5664 if (unlikely(!ctx->supervisor)) {
5665 GEN_EXCP_PRIVREG(ctx);
5668 /* NIP cannot be restored if the memory exception comes from an helper */
5669 gen_update_nip(ctx, ctx->nip - 4);
5670 dcrn = tcg_const_tl(SPR(ctx->opcode));
5671 gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
5672 tcg_temp_free(dcrn);
5677 /* XXX: not implemented on 440 ? */
5678 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
5680 #if defined(CONFIG_USER_ONLY)
5681 GEN_EXCP_PRIVREG(ctx);
5683 if (unlikely(!ctx->supervisor)) {
5684 GEN_EXCP_PRIVREG(ctx);
5687 /* NIP cannot be restored if the memory exception comes from an helper */
5688 gen_update_nip(ctx, ctx->nip - 4);
5689 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5690 /* Note: Rc update flag set leads to undefined state of Rc0 */
5695 /* XXX: not implemented on 440 ? */
5696 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
5698 #if defined(CONFIG_USER_ONLY)
5699 GEN_EXCP_PRIVREG(ctx);
5701 if (unlikely(!ctx->supervisor)) {
5702 GEN_EXCP_PRIVREG(ctx);
5705 /* NIP cannot be restored if the memory exception comes from an helper */
5706 gen_update_nip(ctx, ctx->nip - 4);
5707 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5708 /* Note: Rc update flag set leads to undefined state of Rc0 */
5712 /* mfdcrux (PPC 460) : user-mode access to DCR */
5713 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
5715 /* NIP cannot be restored if the memory exception comes from an helper */
5716 gen_update_nip(ctx, ctx->nip - 4);
5717 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5718 /* Note: Rc update flag set leads to undefined state of Rc0 */
5721 /* mtdcrux (PPC 460) : user-mode access to DCR */
5722 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
5724 /* NIP cannot be restored if the memory exception comes from an helper */
5725 gen_update_nip(ctx, ctx->nip - 4);
5726 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5727 /* Note: Rc update flag set leads to undefined state of Rc0 */
/*
 * 4xx cache-control ops (mostly privileged no-ops since the cache is not
 * emulated) and the rfci/rfdi/rfmci interrupt-return variants.
 * NOTE(review): #else/#endif lines, closing braces and (for the rf*
 * handlers) the GEN_SYNC/helper-call tails are elided from this excerpt.
 */
5731 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
5733 #if defined(CONFIG_USER_ONLY)
5734 GEN_EXCP_PRIVOPC(ctx);
5736 if (unlikely(!ctx->supervisor)) {
5737 GEN_EXCP_PRIVOPC(ctx);
5740 /* interpreted as no-op */
/* dcread: reads the word at EA into rD... actually rD receives the EA
 * (see 5761); the load at 5759 only touches memory for its side effects
 * on the (unemulated) cache -- presumably intentional; verify upstream. */
5745 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
5747 #if defined(CONFIG_USER_ONLY)
5748 GEN_EXCP_PRIVOPC(ctx);
5751 if (unlikely(!ctx->supervisor)) {
5752 GEN_EXCP_PRIVOPC(ctx);
5755 EA = tcg_temp_new();
5756 gen_set_access_type(ACCESS_CACHE);
5757 gen_addr_reg_index(EA, ctx);
5758 val = tcg_temp_new();
5759 gen_qemu_ld32u(val, EA, ctx->mem_idx);
5761 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5767 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
5769 /* interpreted as no-op */
5770 /* XXX: specification say this is treated as a load by the MMU
5771 * but does not generate any exception
5776 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
5778 #if defined(CONFIG_USER_ONLY)
5779 GEN_EXCP_PRIVOPC(ctx);
5781 if (unlikely(!ctx->supervisor)) {
5782 GEN_EXCP_PRIVOPC(ctx);
5785 /* interpreted as no-op */
5790 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
5792 #if defined(CONFIG_USER_ONLY)
5793 GEN_EXCP_PRIVOPC(ctx);
5795 if (unlikely(!ctx->supervisor)) {
5796 GEN_EXCP_PRIVOPC(ctx);
5799 /* interpreted as no-op */
5803 /* rfci (supervisor only) */
5804 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
5806 #if defined(CONFIG_USER_ONLY)
5807 GEN_EXCP_PRIVOPC(ctx);
5809 if (unlikely(!ctx->supervisor)) {
5810 GEN_EXCP_PRIVOPC(ctx);
5813 /* Restore CPU state */
5814 gen_helper_40x_rfci();
5819 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
5821 #if defined(CONFIG_USER_ONLY)
5822 GEN_EXCP_PRIVOPC(ctx);
5824 if (unlikely(!ctx->supervisor)) {
5825 GEN_EXCP_PRIVOPC(ctx);
5828 /* Restore CPU state */
5834 /* BookE specific */
5835 /* XXX: not implemented on 440 ? */
5836 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
5838 #if defined(CONFIG_USER_ONLY)
5839 GEN_EXCP_PRIVOPC(ctx);
5841 if (unlikely(!ctx->supervisor)) {
5842 GEN_EXCP_PRIVOPC(ctx);
5845 /* Restore CPU state */
5851 /* XXX: not implemented on 440 ? */
5852 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5854 #if defined(CONFIG_USER_ONLY)
5855 GEN_EXCP_PRIVOPC(ctx);
5857 if (unlikely(!ctx->supervisor)) {
5858 GEN_EXCP_PRIVOPC(ctx);
5861 /* Restore CPU state */
5867 /* TLB management - PowerPC 405 implementation */
/* tlbre: read TLB entry word selected by rB (hi/lo); other values of rB
 * are invalid. NOTE(review): the switch `case` labels, #else/#endif and
 * closing braces are elided from this excerpt, here and in the 440
 * variants below. */
5869 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5871 #if defined(CONFIG_USER_ONLY)
5872 GEN_EXCP_PRIVOPC(ctx);
5874 if (unlikely(!ctx->supervisor)) {
5875 GEN_EXCP_PRIVOPC(ctx);
5878 switch (rB(ctx->opcode)) {
5880 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5883 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5886 GEN_EXCP_INVAL(ctx);
5892 /* tlbsx - tlbsx. */
/* tlbsx: search TLB for EA; with Rc=1, CR0 reflects hit/miss (EQ set on
 * hit, i.e. result != -1) plus the current XER[SO]. */
5893 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5895 #if defined(CONFIG_USER_ONLY)
5896 GEN_EXCP_PRIVOPC(ctx);
5899 if (unlikely(!ctx->supervisor)) {
5900 GEN_EXCP_PRIVOPC(ctx);
5903 t0 = tcg_temp_new();
5904 gen_addr_reg_index(t0, ctx);
5905 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5907 if (Rc(ctx->opcode)) {
5908 int l1 = gen_new_label();
5909 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5910 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5911 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5912 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5913 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5920 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5922 #if defined(CONFIG_USER_ONLY)
5923 GEN_EXCP_PRIVOPC(ctx);
5925 if (unlikely(!ctx->supervisor)) {
5926 GEN_EXCP_PRIVOPC(ctx);
5929 switch (rB(ctx->opcode)) {
5931 gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5934 gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5937 GEN_EXCP_INVAL(ctx);
5943 /* TLB management - PowerPC 440 implementation */
/* NOTE(review): tlbre_440 dispatches to gen_helper_440_tlbwe here, which
 * looks like a copy of the tlbwe_440 body below -- suspicious for a
 * *read* handler, but lines are elided; verify against full source. */
5945 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5947 #if defined(CONFIG_USER_ONLY)
5948 GEN_EXCP_PRIVOPC(ctx);
5950 if (unlikely(!ctx->supervisor)) {
5951 GEN_EXCP_PRIVOPC(ctx);
5954 switch (rB(ctx->opcode)) {
5959 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5960 gen_helper_440_tlbwe(t0, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5961 tcg_temp_free_i32(t0);
5965 GEN_EXCP_INVAL(ctx);
5971 /* tlbsx - tlbsx. */
5972 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5974 #if defined(CONFIG_USER_ONLY)
5975 GEN_EXCP_PRIVOPC(ctx);
5978 if (unlikely(!ctx->supervisor)) {
5979 GEN_EXCP_PRIVOPC(ctx);
5982 t0 = tcg_temp_new();
5983 gen_addr_reg_index(t0, ctx);
5984 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5986 if (Rc(ctx->opcode)) {
5987 int l1 = gen_new_label();
5988 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5989 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5990 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5991 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5992 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5999 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
6001 #if defined(CONFIG_USER_ONLY)
6002 GEN_EXCP_PRIVOPC(ctx);
6004 if (unlikely(!ctx->supervisor)) {
6005 GEN_EXCP_PRIVOPC(ctx);
6008 switch (rB(ctx->opcode)) {
6013 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6014 gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6015 tcg_temp_free_i32(t0);
6019 GEN_EXCP_INVAL(ctx);
/*
 * wrtee/wrteei: write the external-interrupt-enable bit of MSR from a
 * GPR bit or an immediate opcode bit; both end the TB so a newly enabled
 * interrupt can be taken. Followed by 440-specific dlmzb/mbar/msync/icbt.
 * NOTE(review): #else/#endif lines, closing braces and the end-of-TB
 * GEN_STOP/sync tails are elided from this excerpt.
 */
6026 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
6028 #if defined(CONFIG_USER_ONLY)
6029 GEN_EXCP_PRIVOPC(ctx);
6032 if (unlikely(!ctx->supervisor)) {
6033 GEN_EXCP_PRIVOPC(ctx);
6036 t0 = tcg_temp_new();
/* Copy only the EE bit of rD into MSR. */
6037 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6038 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6039 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6041 /* Stop translation to have a chance to raise an exception
6042 * if we just set msr_ee to 1
6049 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
6051 #if defined(CONFIG_USER_ONLY)
6052 GEN_EXCP_PRIVOPC(ctx);
6054 if (unlikely(!ctx->supervisor)) {
6055 GEN_EXCP_PRIVOPC(ctx);
/* Immediate form: opcode bit 16 carries the new EE value. */
6058 if (ctx->opcode & 0x00010000) {
6059 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6060 /* Stop translation to have a chance to raise an exception */
/* NOTE(review): clearing EE with `andi(..., (1 << MSR_EE))` keeps ONLY
 * the EE bit instead of clearing it -- upstream uses ~(1 << MSR_EE);
 * likely a transcription artifact of this excerpt, verify. */
6063 tcg_gen_andi_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6068 /* PowerPC 440 specific instructions */
/* dlmzb: determine-leftmost-zero-byte via helper; Rc passed through. */
6070 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
6072 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6073 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6074 cpu_gpr[rB(ctx->opcode)], t0);
6075 tcg_temp_free_i32(t0);
6078 /* mbar replaces eieio on 440 */
6079 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
6081 /* interpreted as no-op */
6084 /* msync replaces sync on 440 */
6085 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
6087 /* interpreted as no-op */
6091 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
6093 /* interpreted as no-op */
6094 /* XXX: specification say this is treated as a load by the MMU
6095 * but does not generate any exception
6099 /*** Altivec vector extension ***/
6100 /* Altivec registers moves */
/*
 * 128-bit AVR load/store as two 64-bit accesses at a 16-byte-aligned EA
 * (low 4 bits of EA are masked off, per the Altivec spec). The order of
 * the high/low halves depends on ctx->mem_idx bit 0 -- presumably the
 * endianness flag; verify against DisasContext.
 */
6102 #define GEN_VR_LDX(name, opc2, opc3) \
6103 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
6106 if (unlikely(!ctx->altivec_enabled)) { \
6107 GEN_EXCP_NO_VR(ctx); \
6110 EA = tcg_temp_new(); \
6111 gen_addr_reg_index(EA, ctx); \
6112 tcg_gen_andi_tl(EA, EA, ~0xf); \
6113 if (ctx->mem_idx & 1) { \
6114 gen_qemu_ld64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6115 tcg_gen_addi_tl(EA, EA, 8); \
6116 gen_qemu_ld64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6118 gen_qemu_ld64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6119 tcg_gen_addi_tl(EA, EA, 8); \
6120 gen_qemu_ld64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6122 tcg_temp_free(EA); \
6125 #define GEN_VR_STX(name, opc2, opc3) \
6126 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
6129 if (unlikely(!ctx->altivec_enabled)) { \
6130 GEN_EXCP_NO_VR(ctx); \
6133 EA = tcg_temp_new(); \
6134 gen_addr_reg_index(EA, ctx); \
6135 tcg_gen_andi_tl(EA, EA, ~0xf); \
6136 if (ctx->mem_idx & 1) { \
6137 gen_qemu_st64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6138 tcg_gen_addi_tl(EA, EA, 8); \
6139 gen_qemu_st64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6141 gen_qemu_st64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6142 tcg_gen_addi_tl(EA, EA, 8); \
6143 gen_qemu_st64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6145 tcg_temp_free(EA); \
6148 GEN_VR_LDX(lvx, 0x07, 0x03);
6149 /* As we don't emulate the cache, lvxl is stricly equivalent to lvx */
6150 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6152 GEN_VR_STX(svx, 0x07, 0x07);
6153 /* As we don't emulate the cache, stvxl is stricly equivalent to stvx */
6154 GEN_VR_STX(svxl, 0x07, 0x0F);
6156 /*** SPE extension ***/
6157 /* Register moves */
6159 static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
6160 #if defined(TARGET_PPC64)
6161 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6163 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6167 static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) {
6168 #if defined(TARGET_PPC64)
6169 tcg_gen_mov_i64(cpu_gpr[reg], t);
6171 TCGv_i64 tmp = tcg_temp_new_i64();
6172 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6173 tcg_gen_shri_i64(tmp, t, 32);
6174 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6175 tcg_temp_free_i64(tmp);
/* GEN_SPE: registers one opcode-table handler covering a pair of SPE
 * instructions; the Rc bit of the opcode selects name1 (Rc=1) or name0
 * (Rc=0).  NOTE(review): the macro body is truncated in this copy. */
6179 #define GEN_SPE(name0, name1, opc2, opc3, inval, type)                \
6180 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)           \
6182     if (Rc(ctx->opcode))                                              \
/* Undefined SPE opcodes raise an invalid-instruction exception. */
6188 /* Handler for undefined SPE opcodes */
6189 static always_inline void gen_speundef (DisasContext *ctx)
6191     GEN_EXCP_INVAL(ctx);
/* GEN_SPEOP_LOGIC2: SPE vector bitwise ops (both 32-bit elements).
 * On TARGET_PPC64 one target-long op covers both halves at once; on
 * 32-bit targets the op is applied separately to cpu_gpr (low word)
 * and cpu_gprh (high word). */
6196 #define GEN_SPEOP_LOGIC2(name, tcg_op)                                \
6197 static always_inline void gen_##name (DisasContext *ctx)              \
6199     if (unlikely(!ctx->spe_enabled)) {                                \
6200         GEN_EXCP_NO_AP(ctx);                                          \
6203     tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],        \
6204            cpu_gpr[rB(ctx->opcode)]);                                 \
6207 #define GEN_SPEOP_LOGIC2(name, tcg_op)                                \
6208 static always_inline void gen_##name (DisasContext *ctx)              \
6210     if (unlikely(!ctx->spe_enabled)) {                                \
6211         GEN_EXCP_NO_AP(ctx);                                          \
6214     tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],        \
6215            cpu_gpr[rB(ctx->opcode)]);                                 \
6216     tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],      \
6217            cpu_gprh[rB(ctx->opcode)]);                                \
6221 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6222 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6223 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6224 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6225 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6226 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6227 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6228 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
/* GEN_SPEOP_TCG_LOGIC_IMM2: shift/rotate each 32-bit element by the
 * immediate encoded in the rB field.  The PPC64 variant unpacks the two
 * words from the single 64-bit GPR, applies the 32-bit op, then
 * reassembles with tcg_gen_concat_i32_i64. */
6230 /* SPE logic immediate */
6231 #if defined(TARGET_PPC64)
6232 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                       \
6233 static always_inline void gen_##name (DisasContext *ctx)              \
6235     if (unlikely(!ctx->spe_enabled)) {                                \
6236         GEN_EXCP_NO_AP(ctx);                                          \
6239     TCGv_i32 t0 = tcg_temp_local_new_i32();                           \
6240     TCGv_i32 t1 = tcg_temp_local_new_i32();                           \
6241     TCGv_i64 t2 = tcg_temp_local_new_i64();                           \
6242     tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);              \
6243     tcg_opi(t0, t0, rB(ctx->opcode));                                 \
6244     tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);               \
6245     tcg_gen_trunc_i64_i32(t1, t2);                                    \
6246     tcg_temp_free_i64(t2);                                            \
6247     tcg_opi(t1, t1, rB(ctx->opcode));                                 \
6248     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);         \
6249     tcg_temp_free_i32(t0);                                            \
6250     tcg_temp_free_i32(t1);                                            \
6253 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                       \
6254 static always_inline void gen_##name (DisasContext *ctx)              \
6256     if (unlikely(!ctx->spe_enabled)) {                                \
6257         GEN_EXCP_NO_AP(ctx);                                          \
6260     tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],       \
6262     tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],     \
6266 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6267 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6268 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6269 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
/* GEN_SPEOP_ARITH1: unary SPE op applied to each 32-bit element.
 * PPC64 variant: unpack -> op each half -> concat back into the GPR. */
6271 /* SPE arithmetic */
6272 #if defined(TARGET_PPC64)
6273 #define GEN_SPEOP_ARITH1(name, tcg_op)                                \
6274 static always_inline void gen_##name (DisasContext *ctx)              \
6276     if (unlikely(!ctx->spe_enabled)) {                                \
6277         GEN_EXCP_NO_AP(ctx);                                          \
6280     TCGv_i32 t0 = tcg_temp_local_new_i32();                           \
6281     TCGv_i32 t1 = tcg_temp_local_new_i32();                           \
6282     TCGv_i64 t2 = tcg_temp_local_new_i64();                           \
6283     tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);              \
6285     tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);               \
6286     tcg_gen_trunc_i64_i32(t1, t2);                                    \
6287     tcg_temp_free_i64(t2);                                            \
6289     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);         \
6290     tcg_temp_free_i32(t0);                                            \
6291     tcg_temp_free_i32(t1);                                            \
6294 #define GEN_SPEOP_ARITH1(name, tcg_op)                                \
6295 static always_inline void gen_##name (DisasContext *ctx)              \
6297     if (unlikely(!ctx->spe_enabled)) {                                \
6298         GEN_EXCP_NO_AP(ctx);                                          \
6301     tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);       \
6302     tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);     \
6306 static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
6308 int l1 = gen_new_label();
6309 int l2 = gen_new_label();
6311 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6312 tcg_gen_neg_i32(ret, arg1);
6315 tcg_gen_mov_i32(ret, arg1);
/* Unary SPE element ops: abs, negate, sign extensions, round-to-word
 * (add 0x8000 then clear the low 16 bits' complement via ext16u), and
 * the count-leading-sign/zero helpers. */
6318 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6319 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6320 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6321 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6322 static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
6324     tcg_gen_addi_i32(ret, arg1, 0x8000);
6325     tcg_gen_ext16u_i32(ret, ret);
6327 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6328 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6329 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
/* GEN_SPEOP_ARITH2: binary SPE op applied to each 32-bit element.
 * PPC64 variant: unpack both operands' halves, apply the 32-bit op to
 * each pair, then concat the results back into the destination GPR. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t2 = tcg_temp_local_new_i32();                                   \
    /* Fix: was tcg_temp_local_new(TCG_TYPE_I64) — stale pre-typed-temp       \
       API; every other site here uses tcg_temp_local_new_i64(). */           \
    TCGv_i64 t3 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                      \
    tcg_op(t0, t0, t2);                                                       \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t3);                                            \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t2, t3);                                            \
    tcg_temp_free_i64(t3);                                                    \
    tcg_op(t1, t1, t2);                                                       \
    tcg_temp_free_i32(t2);                                                    \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],              \
           cpu_gprh[rB(ctx->opcode)]);                                        \
}
#endif
6372 static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6377 l1 = gen_new_label();
6378 l2 = gen_new_label();
6379 t0 = tcg_temp_local_new_i32();
6380 /* No error here: 6 bits are used */
6381 tcg_gen_andi_i32(t0, arg2, 0x3F);
6382 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6383 tcg_gen_shr_i32(ret, arg1, t0);
6386 tcg_gen_movi_i32(ret, 0);
6388 tcg_temp_free_i32(t0);
6390 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6391 static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6396 l1 = gen_new_label();
6397 l2 = gen_new_label();
6398 t0 = tcg_temp_local_new_i32();
6399 /* No error here: 6 bits are used */
6400 tcg_gen_andi_i32(t0, arg2, 0x3F);
6401 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6402 tcg_gen_sar_i32(ret, arg1, t0);
6405 tcg_gen_movi_i32(ret, 0);
6407 tcg_temp_free_i32(t0);
6409 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6410 static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6415 l1 = gen_new_label();
6416 l2 = gen_new_label();
6417 t0 = tcg_temp_local_new_i32();
6418 /* No error here: 6 bits are used */
6419 tcg_gen_andi_i32(t0, arg2, 0x3F);
6420 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6421 tcg_gen_shl_i32(ret, arg1, t0);
6424 tcg_gen_movi_i32(ret, 0);
6426 tcg_temp_free_i32(t0);
6428 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6429 static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6431 TCGv_i32 t0 = tcg_temp_new_i32();
6432 tcg_gen_andi_i32(t0, arg2, 0x1F);
6433 tcg_gen_rotl_i32(ret, arg1, t0);
6434 tcg_temp_free_i32(t0);
6436 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6437 static always_inline void gen_evmergehi (DisasContext *ctx)
6439 if (unlikely(!ctx->spe_enabled)) {
6440 GEN_EXCP_NO_AP(ctx);
6443 #if defined(TARGET_PPC64)
6444 TCGv t0 = tcg_temp_new();
6445 TCGv t1 = tcg_temp_new();
6446 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6447 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL);
6448 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6452 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6453 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6456 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6457 static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6459 tcg_gen_sub_i32(ret, arg2, arg1);
6461 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
/* GEN_SPEOP_ARITH_IMM2: per-element op with the immediate taken from
 * the rA field and the source register from rB (note the reversed
 * roles compared with the logic-immediate macro above). */
6463 /* SPE arithmetic immediate */
6464 #if defined(TARGET_PPC64)
6465 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op)                            \
6466 static always_inline void gen_##name (DisasContext *ctx)              \
6468     if (unlikely(!ctx->spe_enabled)) {                                \
6469         GEN_EXCP_NO_AP(ctx);                                          \
6472     TCGv_i32 t0 = tcg_temp_local_new_i32();                           \
6473     TCGv_i32 t1 = tcg_temp_local_new_i32();                           \
6474     TCGv_i64 t2 = tcg_temp_local_new_i64();                           \
6475     tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]);              \
6476     tcg_op(t0, t0, rA(ctx->opcode));                                  \
6477     tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32);               \
6478     tcg_gen_trunc_i64_i32(t1, t2);                                    \
6479     tcg_temp_free_i64(t2);                                            \
6480     tcg_op(t1, t1, rA(ctx->opcode));                                  \
6481     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);         \
6482     tcg_temp_free_i32(t0);                                            \
6483     tcg_temp_free_i32(t1);                                            \
6486 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op)                            \
6487 static always_inline void gen_##name (DisasContext *ctx)              \
6489     if (unlikely(!ctx->spe_enabled)) {                                \
6490         GEN_EXCP_NO_AP(ctx);                                          \
6493     tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],        \
6495     tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)],      \
6499 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
6500 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
/* GEN_SPEOP_COMP: SPE vector compare.  Compares the low elements and
 * sets/clears the CL-related bits of crfD, then compares the high
 * elements and fixes up the CH-related bits (CRF_CH, CRF_CH_OR_CL,
 * CRF_CH_AND_CL).  NOTE(review): the br/set_label lines between the
 * arms are missing from this copy (fused numbering has gaps); the
 * original interleaves four labels — do not restyle without the full
 * text. */
6502 /* SPE comparison */
6503 #if defined(TARGET_PPC64)
6504 #define GEN_SPEOP_COMP(name, tcg_cond)                                \
6505 static always_inline void gen_##name (DisasContext *ctx)              \
6507     if (unlikely(!ctx->spe_enabled)) {                                \
6508         GEN_EXCP_NO_AP(ctx);                                          \
6511     int l1 = gen_new_label();                                         \
6512     int l2 = gen_new_label();                                         \
6513     int l3 = gen_new_label();                                         \
6514     int l4 = gen_new_label();                                         \
6515     TCGv_i32 t0 = tcg_temp_local_new_i32();                           \
6516     TCGv_i32 t1 = tcg_temp_local_new_i32();                           \
6517     TCGv_i64 t2 = tcg_temp_local_new_i64();                           \
6518     tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);              \
6519     tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]);              \
6520     tcg_gen_brcond_i32(tcg_cond, t0, t1, l1);                         \
6521     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0);                  \
6523     gen_set_label(l1);                                                \
6524     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)],                      \
6525                      CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL);          \
6526     gen_set_label(l2);                                                \
6527     tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);               \
6528     tcg_gen_trunc_i64_i32(t0, t2);                                    \
6529     tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32);               \
6530     tcg_gen_trunc_i64_i32(t1, t2);                                    \
6531     tcg_temp_free_i64(t2);                                            \
6532     tcg_gen_brcond_i32(tcg_cond, t0, t1, l3);                         \
6533     tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6534                      ~(CRF_CH | CRF_CH_AND_CL));                      \
6536     gen_set_label(l3);                                                \
6537     tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6538                     CRF_CH | CRF_CH_OR_CL);                           \
6539     gen_set_label(l4);                                                \
6540     tcg_temp_free_i32(t0);                                            \
6541     tcg_temp_free_i32(t1);                                            \
6544 #define GEN_SPEOP_COMP(name, tcg_cond)                                \
6545 static always_inline void gen_##name (DisasContext *ctx)              \
6547     if (unlikely(!ctx->spe_enabled)) {                                \
6548         GEN_EXCP_NO_AP(ctx);                                          \
6551     int l1 = gen_new_label();                                         \
6552     int l2 = gen_new_label();                                         \
6553     int l3 = gen_new_label();                                         \
6554     int l4 = gen_new_label();                                         \
6556     tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)],            \
6557                        cpu_gpr[rB(ctx->opcode)], l1);                 \
6558     tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0);                   \
6560     gen_set_label(l1);                                                \
6561     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)],                      \
6562                      CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL);          \
6563     gen_set_label(l2);                                                \
6564     tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)],           \
6565                        cpu_gprh[rB(ctx->opcode)], l3);                \
6566     tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6567                      ~(CRF_CH | CRF_CH_AND_CL));                      \
6569     gen_set_label(l3);                                                \
6570     tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6571                     CRF_CH | CRF_CH_OR_CL);                           \
6572     gen_set_label(l4);                                                \
6575 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
6576 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
6577 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
6578 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
6579 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
6582 static always_inline void gen_brinc (DisasContext *ctx)
6584 /* Note: brinc is usable even if SPE is disabled */
6585 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
6586 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6588 static always_inline void gen_evmergelo (DisasContext *ctx)
6590 if (unlikely(!ctx->spe_enabled)) {
6591 GEN_EXCP_NO_AP(ctx);
6594 #if defined(TARGET_PPC64)
6595 TCGv t0 = tcg_temp_new();
6596 TCGv t1 = tcg_temp_new();
6597 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6598 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
6599 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6603 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6604 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6607 static always_inline void gen_evmergehilo (DisasContext *ctx)
6609 if (unlikely(!ctx->spe_enabled)) {
6610 GEN_EXCP_NO_AP(ctx);
6613 #if defined(TARGET_PPC64)
6614 TCGv t0 = tcg_temp_new();
6615 TCGv t1 = tcg_temp_new();
6616 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6617 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL);
6618 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6622 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6623 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6626 static always_inline void gen_evmergelohi (DisasContext *ctx)
6628 if (unlikely(!ctx->spe_enabled)) {
6629 GEN_EXCP_NO_AP(ctx);
6632 #if defined(TARGET_PPC64)
6633 TCGv t0 = tcg_temp_new();
6634 TCGv t1 = tcg_temp_new();
6635 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6636 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
6637 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6641 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6642 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6645 static always_inline void gen_evsplati (DisasContext *ctx)
6647 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 11)) >> 27;
6649 #if defined(TARGET_PPC64)
6650 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
6652 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6653 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
6656 static always_inline void gen_evsplatfi (DisasContext *ctx)
6658 uint64_t imm = rA(ctx->opcode) << 11;
6660 #if defined(TARGET_PPC64)
6661 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
6663 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6664 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
/* evsel: per-element select — each 32-bit half of rD is taken from rA
 * or rB depending on two bits of the CR field named in the low opcode
 * bits (bit 3 selects the high half's source, bit 2 the low half's).
 * NOTE(review): the br/set_label and mov-into-t1/t2 lines between the
 * arms are missing from this copy; do not restyle without the full
 * text. */
6668 static always_inline void gen_evsel (DisasContext *ctx)
6670     int l1 = gen_new_label();
6671     int l2 = gen_new_label();
6672     int l3 = gen_new_label();
6673     int l4 = gen_new_label();
6674     TCGv_i32 t0 = tcg_temp_local_new_i32();
6675 #if defined(TARGET_PPC64)
6676     TCGv t1 = tcg_temp_local_new();
6677     TCGv t2 = tcg_temp_local_new();
6679     tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
6680     tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
6681 #if defined(TARGET_PPC64)
6682     tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6684     tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6688 #if defined(TARGET_PPC64)
6689     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6691     tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6694     tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
6695     tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
6696 #if defined(TARGET_PPC64)
6697     tcg_gen_andi_tl(t2, cpu_gpr[rA(ctx->opcode)], 0x00000000FFFFFFFFULL);
6699     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6703 #if defined(TARGET_PPC64)
6704     tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFULL);
6706     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6709     tcg_temp_free_i32(t0);
6710 #if defined(TARGET_PPC64)
6711     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
/* Four decode entries for evsel (one per low-opcode value 0x1c..0x1f,
 * since the CR-field selector overlaps the opcode); the handler bodies
 * (each calling gen_evsel) are elided in this copy. */
6716 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
6720 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
6724 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
6728 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
/* SPE opcode-table registrations: each GEN_SPE entry binds an (Rc=0,
 * Rc=1) handler pair to opc2/opc3 with an invalid-bits mask. */
6733 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
6734 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
6735 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
6736 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
6737 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
6738 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
6739 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
6740 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
6741 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
6742 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
6743 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
6744 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
6745 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
6746 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
6747 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
6748 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
6749 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
6750 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
6751 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
6752 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
6753 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
6754 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
6755 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
6756 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
6757 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
6759 /* SPE load and stores */
6760 static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh)
6762 target_ulong uimm = rB(ctx->opcode);
6764 if (rA(ctx->opcode) == 0)
6765 tcg_gen_movi_tl(EA, uimm << sh);
6767 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
/* evldd: load one doubleword into rD (32-bit targets split it into
 * cpu_gpr low word / cpu_gprh high word via a 64-bit temp). */
6770 static always_inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
6772 #if defined(TARGET_PPC64)
6773     gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6775     TCGv_i64 t0 = tcg_temp_new_i64();
6776     gen_qemu_ld64(t0, addr, ctx->mem_idx);
6777     tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
6778     tcg_gen_shri_i64(t0, t0, 32);
6779     tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
6780     tcg_temp_free_i64(t0);
/* evldw: load two words; word at addr goes to the high element, word
 * at addr+4 to the low element. */
6784 static always_inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
6786 #if defined(TARGET_PPC64)
6787     TCGv t0 = tcg_temp_new();
6788     gen_qemu_ld32u(t0, addr, ctx->mem_idx);
6789     tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6790     tcg_gen_addi_tl(addr, addr, 4);
6791     gen_qemu_ld32u(t0, addr, ctx->mem_idx);
6792     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6795     gen_qemu_ld32u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
6796     tcg_gen_addi_tl(addr, addr, 4);
6797     gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6801 static always_inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
6803 TCGv t0 = tcg_temp_new();
6804 #if defined(TARGET_PPC64)
6805 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6806 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6807 tcg_gen_addi_tl(addr, addr, 2);
6808 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6809 tcg_gen_shli_tl(t0, t0, 32);
6810 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6811 tcg_gen_addi_tl(addr, addr, 2);
6812 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6813 tcg_gen_shli_tl(t0, t0, 16);
6814 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6815 tcg_gen_addi_tl(addr, addr, 2);
6816 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6817 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6819 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6820 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6821 tcg_gen_addi_tl(addr, addr, 2);
6822 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6823 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
6824 tcg_gen_addi_tl(addr, addr, 2);
6825 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6826 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6827 tcg_gen_addi_tl(addr, addr, 2);
6828 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6829 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
/* evlhhesplat: load one halfword and splat it into the even (upper)
 * halfword position of both 32-bit elements of rD. */
6834 static always_inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
6836     TCGv t0 = tcg_temp_new();
6837     gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6838 #if defined(TARGET_PPC64)
6839     tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6840     tcg_gen_shli_tl(t0, t0, 16);
6841     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6843     tcg_gen_shli_tl(t0, t0, 16);
6844     tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6845     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
/* evlhhousplat: load one halfword, zero-extend, and splat it into the
 * odd (lower) halfword position of both 32-bit elements of rD. */
6850 static always_inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
6852     TCGv t0 = tcg_temp_new();
6853     gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6854 #if defined(TARGET_PPC64)
6855     tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6856     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6858     tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6859     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
/* evlhhossplat: like evlhhousplat but sign-extended (ld16s); on PPC64
 * the low copy is re-zero-extended (ext32u) so the sign bits of the
 * high copy do not bleed into the low word. */
6864 static always_inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
6866     TCGv t0 = tcg_temp_new();
6867     gen_qemu_ld16s(t0, addr, ctx->mem_idx);
6868 #if defined(TARGET_PPC64)
6869     tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6870     tcg_gen_ext32u_tl(t0, t0);
6871     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6873     tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6874     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6879 static always_inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
6881 TCGv t0 = tcg_temp_new();
6882 #if defined(TARGET_PPC64)
6883 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6884 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6885 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6886 tcg_gen_shli_tl(t0, t0, 16);
6887 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6889 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6890 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6891 tcg_gen_addi_tl(addr, addr, 2);
6892 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6893 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
/* evlwhou: load two halfwords, zero-extended, into the low halfword
 * position of each 32-bit element of rD. */
6898 static always_inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
6900 #if defined(TARGET_PPC64)
6901     TCGv t0 = tcg_temp_new();
6902     gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6903     tcg_gen_addi_tl(addr, addr, 2);
6904     gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6905     tcg_gen_shli_tl(t0, t0, 32);
6906     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6909     gen_qemu_ld16u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
6910     tcg_gen_addi_tl(addr, addr, 2);
6911     gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
/* evlwhos: like evlwhou but sign-extended (ld16s); the PPC64 branch
 * zero-extends the first value (ext32u) so its sign bits do not reach
 * the high element.  NOTE(review): the first halfword lands in the low
 * element here while the shifted one fills the high — layout mirrors
 * evlwhou above. */
6915 static always_inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
6917 #if defined(TARGET_PPC64)
6918     TCGv t0 = tcg_temp_new();
6919     gen_qemu_ld16s(t0, addr, ctx->mem_idx);
6920     tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
6921     tcg_gen_addi_tl(addr, addr, 2);
6922     gen_qemu_ld16s(t0, addr, ctx->mem_idx);
6923     tcg_gen_shli_tl(t0, t0, 32);
6924     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6927     gen_qemu_ld16s(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
6928     tcg_gen_addi_tl(addr, addr, 2);
6929     gen_qemu_ld16s(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
/* evlwwsplat: load one word and splat it into both 32-bit elements. */
6933 static always_inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
6935     TCGv t0 = tcg_temp_new();
6936     gen_qemu_ld32u(t0, addr, ctx->mem_idx);
6937 #if defined(TARGET_PPC64)
6938     tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6939     tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6941     tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6942     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6947 static always_inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
6949 TCGv t0 = tcg_temp_new();
6950 #if defined(TARGET_PPC64)
6951 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6952 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6953 tcg_gen_shli_tl(t0, t0, 32);
6954 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6955 tcg_gen_addi_tl(addr, addr, 2);
6956 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6957 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6958 tcg_gen_shli_tl(t0, t0, 16);
6959 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6961 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6962 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6963 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
6964 tcg_gen_addi_tl(addr, addr, 2);
6965 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6966 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
6967 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
/* evstdd: store the full 64-bit SPE value of rS as one doubleword
 * (32-bit targets first rebuild it from gpr/gprh with concat). */
6972 static always_inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
6974 #if defined(TARGET_PPC64)
6975     gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
6977     TCGv_i64 t0 = tcg_temp_new_i64();
6978     tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
6979     gen_qemu_st64(t0, addr, ctx->mem_idx);
6980     tcg_temp_free_i64(t0);
/* evstdw: store the high element at addr, then the low element at
 * addr+4. */
6984 static always_inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
6986 #if defined(TARGET_PPC64)
6987     TCGv t0 = tcg_temp_new();
6988     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
6989     gen_qemu_st32(t0, addr, ctx->mem_idx);
6992     gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
6994     tcg_gen_addi_tl(addr, addr, 4);
6995     gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstdh: store the four halfwords of rS, most-significant first. */
6998 static always_inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
7000     TCGv t0 = tcg_temp_new();
7001 #if defined(TARGET_PPC64)
7002     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7004     tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7006     gen_qemu_st16(t0, addr, ctx->mem_idx);
7007     tcg_gen_addi_tl(addr, addr, 2);
7008 #if defined(TARGET_PPC64)
7009     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7010     gen_qemu_st16(t0, addr, ctx->mem_idx);
7012     gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
7014     tcg_gen_addi_tl(addr, addr, 2);
7015     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7016     gen_qemu_st16(t0, addr, ctx->mem_idx);
7018     tcg_gen_addi_tl(addr, addr, 2);
7019     gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstwhe: store the even (upper) halfword of each element. */
7022 static always_inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
7024     TCGv t0 = tcg_temp_new();
7025 #if defined(TARGET_PPC64)
7026     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7028     tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7030     gen_qemu_st16(t0, addr, ctx->mem_idx);
7031     tcg_gen_addi_tl(addr, addr, 2);
7032     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7033     gen_qemu_st16(t0, addr, ctx->mem_idx);
/* evstwho: store the odd (lower) halfword of each element. */
7037 static always_inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7039 #if defined(TARGET_PPC64)
7040     TCGv t0 = tcg_temp_new();
7041     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7042     gen_qemu_st16(t0, addr, ctx->mem_idx);
7045     gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
7047     tcg_gen_addi_tl(addr, addr, 2);
7048     gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstwwe: store only the high (even) 32-bit element of rS. */
7051 static always_inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7053 #if defined(TARGET_PPC64)
7054     TCGv t0 = tcg_temp_new();
7055     tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7056     gen_qemu_st32(t0, addr, ctx->mem_idx);
7059     gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstwwo: store only the low (odd) 32-bit element of rS. */
7063 static always_inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7065     gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* GEN_SPEOP_LDST: decode wrapper for the gen_op_ev* load/store helpers
 * above.  Rc selects immediate-indexed (UIMM << sh) vs register-indexed
 * addressing; the helper receives the computed EA in t0. */
7068 #define GEN_SPEOP_LDST(name, opc2, sh)                                \
7069 GEN_HANDLER(gen_##name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)        \
7072     if (unlikely(!ctx->spe_enabled)) {                                \
7073         GEN_EXCP_NO_AP(ctx);                                          \
7076     t0 = tcg_temp_new();                                              \
7077     if (Rc(ctx->opcode)) {                                            \
7078         gen_addr_spe_imm_index(t0, ctx, sh);                          \
7080         gen_addr_reg_index(t0, ctx);                                  \
7082     gen_op_##name(ctx, t0);                                           \
7083     tcg_temp_free(t0);                                                \
/* Register all SPE load/store handlers; the last argument is the
 * immediate-offset shift (log2 of the access granularity). */
7086 GEN_SPEOP_LDST(evldd, 0x00, 3);
7087 GEN_SPEOP_LDST(evldw, 0x01, 3);
7088 GEN_SPEOP_LDST(evldh, 0x02, 3);
7089 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7090 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7091 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7092 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7093 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7094 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7095 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7096 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7098 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7099 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7100 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7101 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7102 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7103 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7104 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
/* SPE multiply / multiply-accumulate opcode-table registrations.
 * As the TODO notes, most of these still lack real implementations
 * (paired with speundef), so executing them raises an invalid-op
 * exception. */
7106 /* Multiply and add - TODO */
7108 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7109 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7110 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7111 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7112 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7113 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7114 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7115 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7116 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7117 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7118 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7119 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7121 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7122 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7123 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7124 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7125 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7126 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
7127 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7128 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7129 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7130 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7131 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7132 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7133 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
7134 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7136 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7137 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7138 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7139 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7140 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7141 GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
7143 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7144 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7145 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7146 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7147 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7148 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7149 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7150 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7151 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7152 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7153 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7154 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7156 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7157 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7158 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7159 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
7160 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7162 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7163 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7164 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7165 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7166 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7167 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7168 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7169 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7170 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7171 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7172 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7173 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7175 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7176 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7177 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7178 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7179 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7182 /*** SPE floating-point extension ***/
/* Two variants of each helper-invoking macro follow.  In the
 * TARGET_PPC64 branch a 64-bit SPE value occupies one GPR, so 32-bit
 * results are merged into the low half of rD while preserving its high
 * half.  In the 32-bit branch (introduced by the #else at original line
 * 7287, elided from this listing) a 64-bit value is split across the
 * gprh:gpr pair and moved via gen_load_gpr64/gen_store_gpr64.
 * NOTE(review): the CONV_* macros do not gate on ctx->spe_enabled the
 * way ARITH2_*/COMP_* do -- confirm whether that is intentional. */
7183 #if defined(TARGET_PPC64)
/* 32-bit op on the low word of rB, result merged into low word of rD. */
7184 #define GEN_SPEFPUOP_CONV_32_32(name) \
7185 static always_inline void gen_##name (DisasContext *ctx) \
7189 t0 = tcg_temp_new_i32(); \
7190 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7191 gen_helper_##name(t0, t0); \
7192 t1 = tcg_temp_new(); \
7193 tcg_gen_extu_i32_tl(t1, t0); \
7194 tcg_temp_free_i32(t0); \
7195 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7196 0xFFFFFFFF00000000ULL); \
7197 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7198 tcg_temp_free(t1); \
/* 64-bit source in rB, 32-bit result merged into the low word of rD. */
7200 #define GEN_SPEFPUOP_CONV_32_64(name) \
7201 static always_inline void gen_##name (DisasContext *ctx) \
7205 t0 = tcg_temp_new_i32(); \
7206 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7207 t1 = tcg_temp_new(); \
7208 tcg_gen_extu_i32_tl(t1, t0); \
7209 tcg_temp_free_i32(t0); \
7210 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7211 0xFFFFFFFF00000000ULL); \
7212 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7213 tcg_temp_free(t1); \
/* 32-bit source (low word of rB), full 64-bit result written to rD. */
7215 #define GEN_SPEFPUOP_CONV_64_32(name) \
7216 static always_inline void gen_##name (DisasContext *ctx) \
7218 TCGv_i32 t0 = tcg_temp_new_i32(); \
7219 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7220 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7221 tcg_temp_free_i32(t0); \
/* 64-bit source and destination: direct helper call on the GPRs. */
7223 #define GEN_SPEFPUOP_CONV_64_64(name) \
7224 static always_inline void gen_##name (DisasContext *ctx) \
7226 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
/* Two-operand 32-bit arithmetic; raises the SPE-unavailable exception
 * if MSR[SPE] is clear, then merges the 32-bit result into rD's low word. */
7228 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7229 static always_inline void gen_##name (DisasContext *ctx) \
7233 if (unlikely(!ctx->spe_enabled)) { \
7234 GEN_EXCP_NO_AP(ctx); \
7237 t0 = tcg_temp_new_i32(); \
7238 t1 = tcg_temp_new_i32(); \
7239 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7240 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7241 gen_helper_##name(t0, t0, t1); \
7242 tcg_temp_free_i32(t1); \
7243 t2 = tcg_temp_new(); \
7244 tcg_gen_extu_i32_tl(t2, t0); \
7245 tcg_temp_free_i32(t0); \
7246 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7247 0xFFFFFFFF00000000ULL); \
7248 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7249 tcg_temp_free(t2); \
/* Two-operand 64-bit arithmetic: rD = helper(rA, rB). */
7251 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7252 static always_inline void gen_##name (DisasContext *ctx) \
7254 if (unlikely(!ctx->spe_enabled)) { \
7255 GEN_EXCP_NO_AP(ctx); \
7258 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7259 cpu_gpr[rB(ctx->opcode)]); \
/* 32-bit compare: helper writes the CR field selected by crfD. */
7261 #define GEN_SPEFPUOP_COMP_32(name) \
7262 static always_inline void gen_##name (DisasContext *ctx) \
7265 if (unlikely(!ctx->spe_enabled)) { \
7266 GEN_EXCP_NO_AP(ctx); \
7269 t0 = tcg_temp_new_i32(); \
7270 t1 = tcg_temp_new_i32(); \
7271 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7272 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7273 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7274 tcg_temp_free_i32(t0); \
7275 tcg_temp_free_i32(t1); \
/* 64-bit compare: helper writes the CR field selected by crfD. */
7277 #define GEN_SPEFPUOP_COMP_64(name) \
7278 static always_inline void gen_##name (DisasContext *ctx) \
7280 if (unlikely(!ctx->spe_enabled)) { \
7281 GEN_EXCP_NO_AP(ctx); \
7284 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7285 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
/* ---- 32-bit target variants (implicit #else of the #if above): 64-bit
 * SPE values live in the gprh:gpr pair and are marshalled through
 * gen_load_gpr64/gen_store_gpr64 into TCGv_i64 temporaries. ---- */
7288 #define GEN_SPEFPUOP_CONV_32_32(name) \
7289 static always_inline void gen_##name (DisasContext *ctx) \
7291 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7293 #define GEN_SPEFPUOP_CONV_32_64(name) \
7294 static always_inline void gen_##name (DisasContext *ctx) \
7296 TCGv_i64 t0 = tcg_temp_new_i64(); \
7297 gen_load_gpr64(t0, rB(ctx->opcode)); \
7298 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7299 tcg_temp_free_i64(t0); \
7301 #define GEN_SPEFPUOP_CONV_64_32(name) \
7302 static always_inline void gen_##name (DisasContext *ctx) \
7304 TCGv_i64 t0 = tcg_temp_new_i64(); \
7305 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7306 gen_store_gpr64(rD(ctx->opcode), t0); \
7307 tcg_temp_free_i64(t0); \
7309 #define GEN_SPEFPUOP_CONV_64_64(name) \
7310 static always_inline void gen_##name (DisasContext *ctx) \
7312 TCGv_i64 t0 = tcg_temp_new_i64(); \
7313 gen_load_gpr64(t0, rB(ctx->opcode)); \
7314 gen_helper_##name(t0, t0); \
7315 gen_store_gpr64(rD(ctx->opcode), t0); \
7316 tcg_temp_free_i64(t0); \
7318 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7319 static always_inline void gen_##name (DisasContext *ctx) \
7321 if (unlikely(!ctx->spe_enabled)) { \
7322 GEN_EXCP_NO_AP(ctx); \
7325 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
7326 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7328 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7329 static always_inline void gen_##name (DisasContext *ctx) \
7332 if (unlikely(!ctx->spe_enabled)) { \
7333 GEN_EXCP_NO_AP(ctx); \
7336 t0 = tcg_temp_new_i64(); \
7337 t1 = tcg_temp_new_i64(); \
7338 gen_load_gpr64(t0, rA(ctx->opcode)); \
7339 gen_load_gpr64(t1, rB(ctx->opcode)); \
7340 gen_helper_##name(t0, t0, t1); \
7341 gen_store_gpr64(rD(ctx->opcode), t0); \
7342 tcg_temp_free_i64(t0); \
7343 tcg_temp_free_i64(t1); \
7345 #define GEN_SPEFPUOP_COMP_32(name) \
7346 static always_inline void gen_##name (DisasContext *ctx) \
7348 if (unlikely(!ctx->spe_enabled)) { \
7349 GEN_EXCP_NO_AP(ctx); \
7352 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7353 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7355 #define GEN_SPEFPUOP_COMP_64(name) \
7356 static always_inline void gen_##name (DisasContext *ctx) \
7359 if (unlikely(!ctx->spe_enabled)) { \
7360 GEN_EXCP_NO_AP(ctx); \
7363 t0 = tcg_temp_new_i64(); \
7364 t1 = tcg_temp_new_i64(); \
7365 gen_load_gpr64(t0, rA(ctx->opcode)); \
7366 gen_load_gpr64(t1, rB(ctx->opcode)); \
7367 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7368 tcg_temp_free_i64(t0); \
7369 tcg_temp_free_i64(t1); \
7373 /* Single precision floating-point vectors operations */
/* evfs{add,sub,mul,div}: element-wise two-operand vector-float
 * arithmetic, expanded from GEN_SPEFPUOP_ARITH2_64_64 (helper call on
 * the full 64-bit SPE value). */
7375 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
7376 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
7377 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
7378 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
7379 static always_inline void gen_evfsabs (DisasContext *ctx)
7381 if (unlikely(!ctx->spe_enabled)) {
7382 GEN_EXCP_NO_AP(ctx);
7385 #if defined(TARGET_PPC64)
7386 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
7388 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
7389 tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7392 static always_inline void gen_evfsnabs (DisasContext *ctx)
7394 if (unlikely(!ctx->spe_enabled)) {
7395 GEN_EXCP_NO_AP(ctx);
7398 #if defined(TARGET_PPC64)
7399 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7401 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7402 tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7405 static always_inline void gen_evfsneg (DisasContext *ctx)
7407 if (unlikely(!ctx->spe_enabled)) {
7408 GEN_EXCP_NO_AP(ctx);
7411 #if defined(TARGET_PPC64)
7412 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7414 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7415 tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
/* evfs conversions (cf* = convert-from, ct* = convert-to; u/s =
 * unsigned/signed integer, f = fractional, trailing z = round toward
 * zero), all 64-bit in / 64-bit out. */
7420 GEN_SPEFPUOP_CONV_64_64(evfscfui);
7421 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
7422 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
7423 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
7424 GEN_SPEFPUOP_CONV_64_64(evfsctui);
7425 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
7426 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
7427 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
7428 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
7429 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
/* evfs compares/tests: result goes to the CR field selected by crfD. */
7432 GEN_SPEFPUOP_COMP_64(evfscmpgt);
7433 GEN_SPEFPUOP_COMP_64(evfscmplt);
7434 GEN_SPEFPUOP_COMP_64(evfscmpeq);
7435 GEN_SPEFPUOP_COMP_64(evfststgt);
7436 GEN_SPEFPUOP_COMP_64(evfststlt);
7437 GEN_SPEFPUOP_COMP_64(evfststeq);
7439 /* Opcodes definitions */
7440 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
7441 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
7442 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
7443 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
7444 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
7445 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
7446 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
7447 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
7448 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
7449 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
7450 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
7451 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
7452 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
7453 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
7455 /* Single precision floating-point operations */
/* efs{add,sub,mul,div}: scalar single-precision arithmetic on the low
 * 32 bits of the GPRs, expanded from GEN_SPEFPUOP_ARITH2_32_32. */
7457 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
7458 GEN_SPEFPUOP_ARITH2_32_32(efssub);
7459 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
7460 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
7461 static always_inline void gen_efsabs (DisasContext *ctx)
7463 if (unlikely(!ctx->spe_enabled)) {
7464 GEN_EXCP_NO_AP(ctx);
7467 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
7469 static always_inline void gen_efsnabs (DisasContext *ctx)
7471 if (unlikely(!ctx->spe_enabled)) {
7472 GEN_EXCP_NO_AP(ctx);
7475 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7477 static always_inline void gen_efsneg (DisasContext *ctx)
7479 if (unlikely(!ctx->spe_enabled)) {
7480 GEN_EXCP_NO_AP(ctx);
7483 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
/* efs conversions: 32-bit single in the low GPR word; efscfd converts
 * from a 64-bit double source. */
7487 GEN_SPEFPUOP_CONV_32_32(efscfui);
7488 GEN_SPEFPUOP_CONV_32_32(efscfsi);
7489 GEN_SPEFPUOP_CONV_32_32(efscfuf);
7490 GEN_SPEFPUOP_CONV_32_32(efscfsf);
7491 GEN_SPEFPUOP_CONV_32_32(efsctui);
7492 GEN_SPEFPUOP_CONV_32_32(efsctsi);
7493 GEN_SPEFPUOP_CONV_32_32(efsctuf);
7494 GEN_SPEFPUOP_CONV_32_32(efsctsf);
7495 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
7496 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
7497 GEN_SPEFPUOP_CONV_32_64(efscfd);
/* efs compares/tests: result goes to the CR field selected by crfD. */
7500 GEN_SPEFPUOP_COMP_32(efscmpgt);
7501 GEN_SPEFPUOP_COMP_32(efscmplt);
7502 GEN_SPEFPUOP_COMP_32(efscmpeq);
7503 GEN_SPEFPUOP_COMP_32(efststgt);
7504 GEN_SPEFPUOP_COMP_32(efststlt);
7505 GEN_SPEFPUOP_COMP_32(efststeq);
7507 /* Opcodes definitions */
7508 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
7509 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
7510 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
7511 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
7512 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
7513 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
7514 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
7515 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
7516 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
7517 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
7518 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
7519 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
7520 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
7521 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
7523 /* Double precision floating-point operations */
/* efd{add,sub,mul,div}: scalar double-precision arithmetic on the full
 * 64-bit SPE value, expanded from GEN_SPEFPUOP_ARITH2_64_64. */
7525 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
7526 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
7527 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
7528 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
7529 static always_inline void gen_efdabs (DisasContext *ctx)
7531 if (unlikely(!ctx->spe_enabled)) {
7532 GEN_EXCP_NO_AP(ctx);
7535 #if defined(TARGET_PPC64)
7536 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
7538 tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7541 static always_inline void gen_efdnabs (DisasContext *ctx)
7543 if (unlikely(!ctx->spe_enabled)) {
7544 GEN_EXCP_NO_AP(ctx);
7547 #if defined(TARGET_PPC64)
7548 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
7550 tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7553 static always_inline void gen_efdneg (DisasContext *ctx)
7555 if (unlikely(!ctx->spe_enabled)) {
7556 GEN_EXCP_NO_AP(ctx);
7559 #if defined(TARGET_PPC64)
7560 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
7562 tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
/* efd conversions: 64-bit double results from 32-bit sources (cf*),
 * 32-bit integer/fractional results from 64-bit doubles (ct*), plus
 * the 64-bit-source "d" forms (efdcfuid etc.) and efdcfs (from single). */
7567 GEN_SPEFPUOP_CONV_64_32(efdcfui);
7568 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
7569 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
7570 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
7571 GEN_SPEFPUOP_CONV_32_64(efdctui);
7572 GEN_SPEFPUOP_CONV_32_64(efdctsi);
7573 GEN_SPEFPUOP_CONV_32_64(efdctuf);
7574 GEN_SPEFPUOP_CONV_32_64(efdctsf);
7575 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
7576 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
7577 GEN_SPEFPUOP_CONV_64_32(efdcfs);
7578 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
7579 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
7580 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
7581 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
/* efd compares/tests: result goes to the CR field selected by crfD. */
7584 GEN_SPEFPUOP_COMP_64(efdcmpgt);
7585 GEN_SPEFPUOP_COMP_64(efdcmplt);
7586 GEN_SPEFPUOP_COMP_64(efdcmpeq);
7587 GEN_SPEFPUOP_COMP_64(efdtstgt);
7588 GEN_SPEFPUOP_COMP_64(efdtstlt);
7589 GEN_SPEFPUOP_COMP_64(efdtsteq);
7591 /* Opcodes definitions */
7592 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
7593 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
7594 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
7595 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
7596 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
7597 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
7598 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
7599 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
7600 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
7601 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
7602 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
7603 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
7604 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
7605 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
7606 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
7607 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
7609 /* End opcode list */
/* Sentinel marking the end of the opcode registration section
 * (GEN_OPCODE_MARK presumably emits an end-of-table marker symbol --
 * macro defined earlier in the file). */
7610 GEN_OPCODE_MARK(end);
/* CPU model definitions and register helpers are textually included
 * here so they see the static tables above. */
7612 #include "translate_init.c"
7613 #include "helper_regs.h"
7615 /*****************************************************************************/
7616 /* Misc PowerPC helpers */
/* Dump the architectural CPU state (NIP/LR/CTR/XER, MSR, time base,
 * GPRs, CR, FPRs and -- system mode only -- SRR0/SRR1/SDR1) to stream f
 * using the supplied printf-like callback.  RGPL/RFPL (defined in the
 * elided portion) set how many GPR/FPR columns are printed per row. */
7617 void cpu_dump_state (CPUState *env, FILE *f,
7618 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7626 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
7627 env->nip, env->lr, env->ctr, env->xer);
7628 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
7629 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
7630 #if !defined(NO_TIMER_DUMP)
/* Time base (and, in system mode, the decrementer). */
7631 cpu_fprintf(f, "TB %08x %08x "
7632 #if !defined(CONFIG_USER_ONLY)
7636 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
7637 #if !defined(CONFIG_USER_ONLY)
7638 , cpu_ppc_load_decr(env)
/* General-purpose registers, RGPL per output row. */
7642 for (i = 0; i < 32; i++) {
7643 if ((i & (RGPL - 1)) == 0)
7644 cpu_fprintf(f, "GPR%02d", i);
7645 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
7646 if ((i & (RGPL - 1)) == (RGPL - 1))
7647 cpu_fprintf(f, "\n");
/* Condition register: raw nibbles, then a decoded [L G E O] view
 * (decode of the 'a' character is in the elided lines 7654-7660). */
7649 cpu_fprintf(f, "CR ");
7650 for (i = 0; i < 8; i++)
7651 cpu_fprintf(f, "%01x", env->crf[i]);
7652 cpu_fprintf(f, " [");
7653 for (i = 0; i < 8; i++) {
7655 if (env->crf[i] & 0x08)
7657 else if (env->crf[i] & 0x04)
7659 else if (env->crf[i] & 0x02)
7661 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
7663 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
/* Floating-point registers as raw 64-bit hex, RFPL per row. */
7664 for (i = 0; i < 32; i++) {
7665 if ((i & (RFPL - 1)) == 0)
7666 cpu_fprintf(f, "FPR%02d", i);
7667 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
7668 if ((i & (RFPL - 1)) == (RFPL - 1))
7669 cpu_fprintf(f, "\n");
7671 #if !defined(CONFIG_USER_ONLY)
7672 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
7673 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
/* Dump per-opcode execution counts (gathered when DO_PPC_STATISTICS is
 * defined) by walking the 3-level opcode dispatch tables; entries with a
 * zero count are skipped.  Compiles to an empty function otherwise. */
7680 void cpu_dump_statistics (CPUState *env, FILE*f,
7681 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7684 #if defined(DO_PPC_STATISTICS)
7685 opc_handler_t **t1, **t2, **t3, *handler;
/* Level 1: major opcode (opc1, 6 bits -> 64 entries). */
7689 for (op1 = 0; op1 < 64; op1++) {
7691 if (is_indirect_opcode(handler)) {
/* Level 2: extended opcode opc2 (32 entries). */
7692 t2 = ind_table(handler);
7693 for (op2 = 0; op2 < 32; op2++) {
7695 if (is_indirect_opcode(handler)) {
/* Level 3: extended opcode opc3 (32 entries). */
7696 t3 = ind_table(handler);
7697 for (op3 = 0; op3 < 32; op3++) {
7699 if (handler->count == 0)
7701 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
7703 op1, op2, op3, op1, (op3 << 5) | op2,
7705 handler->count, handler->count);
7708 if (handler->count == 0)
7710 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
7712 op1, op2, op1, op2, handler->oname,
7713 handler->count, handler->count);
7717 if (handler->count == 0)
7719 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
7720 op1, op1, handler->oname,
7721 handler->count, handler->count);
7727 /*****************************************************************************/
/* Core translation loop: decode guest PowerPC instructions starting at
 * tb->pc and emit TCG ops for one translation block.  Stops at a branch
 * or other exception-raising instruction, a page boundary, the TCG op
 * buffer limit, max_insns, or single-step.  When search_pc is non-zero
 * the gen_opc_* side tables are filled so a host PC can be mapped back
 * to a guest NIP (see gen_pc_load). */
7728 static always_inline void gen_intermediate_code_internal (CPUState *env,
7729 TranslationBlock *tb,
7732 DisasContext ctx, *ctxp = &ctx;
7733 opc_handler_t **table, *handler;
7734 target_ulong pc_start;
7735 uint16_t *gen_opc_end;
7736 int supervisor, little_endian;
7743 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7744 #if defined(OPTIMIZE_FPRF_UPDATE)
7745 gen_fprf_ptr = gen_fprf_buf;
/* Seed the disassembly context from the CPU state captured at TB start. */
7749 ctx.exception = POWERPC_EXCP_NONE;
7750 ctx.spr_cb = env->spr_cb;
7751 supervisor = env->mmu_idx;
7752 #if !defined(CONFIG_USER_ONLY)
7753 ctx.supervisor = supervisor;
7755 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
7756 #if defined(TARGET_PPC64)
/* mem_idx packs privilege, 64-bit mode and endianness for memory ops. */
7757 ctx.sf_mode = msr_sf;
7758 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
7760 ctx.mem_idx = (supervisor << 1) | little_endian;
/* Facility-availability bits gate which opcodes may be translated. */
7762 ctx.fpu_enabled = msr_fp;
7763 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
7764 ctx.spe_enabled = msr_spe;
7766 ctx.spe_enabled = 0;
7767 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
7768 ctx.altivec_enabled = msr_vr;
7770 ctx.altivec_enabled = 0;
/* Single-step sources: MSR[SE], MSR[BE] (branch trace), gdbstub. */
7771 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
7772 ctx.singlestep_enabled = CPU_SINGLE_STEP;
7774 ctx.singlestep_enabled = 0;
7775 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
7776 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
7777 if (unlikely(env->singlestep_enabled))
7778 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
7779 #if defined (DO_SINGLE_STEP) && 0
7780 /* Single step trace mode */
7784 max_insns = tb->cflags & CF_COUNT_MASK;
7786 max_insns = CF_COUNT_MASK;
7789 /* Set env in case of segfault during code fetch */
/* Main decode loop: one guest instruction per iteration. */
7790 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
/* Emit a debug trap if a breakpoint is set at the current NIP. */
7791 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
7792 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
7793 if (bp->pc == ctx.nip) {
7794 gen_update_nip(&ctx, ctx.nip);
7795 gen_helper_raise_debug();
/* Record NIP/icount per TCG op for PC searching. */
7800 if (unlikely(search_pc)) {
7801 j = gen_opc_ptr - gen_opc_buf;
7805 gen_opc_instr_start[lj++] = 0;
7806 gen_opc_pc[lj] = ctx.nip;
7807 gen_opc_instr_start[lj] = 1;
7808 gen_opc_icount[lj] = num_insns;
7811 #if defined PPC_DEBUG_DISAS
7812 if (loglevel & CPU_LOG_TB_IN_ASM) {
7813 fprintf(logfile, "----------------\n");
7814 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
7815 ctx.nip, supervisor, (int)msr_ir);
7818 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
/* Fetch the 32-bit opcode, byte-swapping in little-endian mode. */
7820 if (unlikely(little_endian)) {
7821 ctx.opcode = bswap32(ldl_code(ctx.nip));
7823 ctx.opcode = ldl_code(ctx.nip);
7825 #if defined PPC_DEBUG_DISAS
7826 if (loglevel & CPU_LOG_TB_IN_ASM) {
7827 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
7828 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
7829 opc3(ctx.opcode), little_endian ? "little" : "big");
/* Resolve the handler through up to three dispatch-table levels
 * (opc1 -> opc2 -> opc3). */
7833 table = env->opcodes;
7835 handler = table[opc1(ctx.opcode)];
7836 if (is_indirect_opcode(handler)) {
7837 table = ind_table(handler);
7838 handler = table[opc2(ctx.opcode)];
7839 if (is_indirect_opcode(handler)) {
7840 table = ind_table(handler);
7841 handler = table[opc3(ctx.opcode)];
7844 /* Is opcode *REALLY* valid ? */
7845 if (unlikely(handler->handler == &gen_invalid)) {
7846 if (loglevel != 0) {
7847 fprintf(logfile, "invalid/unsupported opcode: "
7848 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
7849 opc1(ctx.opcode), opc2(ctx.opcode),
7850 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
7852 printf("invalid/unsupported opcode: "
7853 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
7854 opc1(ctx.opcode), opc2(ctx.opcode),
7855 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
/* Reject encodings that set bits the instruction defines as zero. */
7858 if (unlikely((ctx.opcode & handler->inval) != 0)) {
7859 if (loglevel != 0) {
7860 fprintf(logfile, "invalid bits: %08x for opcode: "
7861 "%02x - %02x - %02x (%08x) " ADDRX "\n",
7862 ctx.opcode & handler->inval, opc1(ctx.opcode),
7863 opc2(ctx.opcode), opc3(ctx.opcode),
7864 ctx.opcode, ctx.nip - 4);
7866 printf("invalid bits: %08x for opcode: "
7867 "%02x - %02x - %02x (%08x) " ADDRX "\n",
7868 ctx.opcode & handler->inval, opc1(ctx.opcode),
7869 opc2(ctx.opcode), opc3(ctx.opcode),
7870 ctx.opcode, ctx.nip - 4);
7872 GEN_EXCP_INVAL(ctxp);
/* Translate the instruction via its registered generator. */
7876 (*(handler->handler))(&ctx);
7877 #if defined(DO_PPC_STATISTICS)
7880 /* Check trace mode exceptions */
/* The 0x100..0xF00 window excludes exception vectors from tracing. */
7881 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
7882 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
7883 ctx.exception != POWERPC_SYSCALL &&
7884 ctx.exception != POWERPC_EXCP_TRAP &&
7885 ctx.exception != POWERPC_EXCP_BRANCH)) {
7886 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
7887 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
7888 (env->singlestep_enabled) ||
7889 num_insns >= max_insns)) {
7890 /* if we reach a page boundary or are single stepping, stop
7895 #if defined (DO_SINGLE_STEP)
/* Loop exited: finish the TB. */
7899 if (tb->cflags & CF_LAST_IO)
7901 if (ctx.exception == POWERPC_EXCP_NONE) {
7902 gen_goto_tb(&ctx, 0, ctx.nip);
7903 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
7904 if (unlikely(env->singlestep_enabled)) {
7905 gen_update_nip(&ctx, ctx.nip);
7906 gen_helper_raise_debug();
7908 /* Generate the return instruction */
7911 gen_icount_end(tb, num_insns);
7912 *gen_opc_ptr = INDEX_op_end;
/* Pad the search tables so lookups past the last insn fail cleanly. */
7913 if (unlikely(search_pc)) {
7914 j = gen_opc_ptr - gen_opc_buf;
7917 gen_opc_instr_start[lj++] = 0;
7919 tb->size = ctx.nip - pc_start;
7920 tb->icount = num_insns;
7922 #if defined(DEBUG_DISAS)
7923 if (loglevel & CPU_LOG_TB_CPU) {
7924 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
7925 cpu_dump_state(env, logfile, fprintf, 0);
7927 if (loglevel & CPU_LOG_TB_IN_ASM) {
7929 flags = env->bfd_mach;
7930 flags |= little_endian << 16;
7931 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7932 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
7933 fprintf(logfile, "\n");
/* Public entry point: translate one TB without filling the PC-search
 * side tables (search_pc = 0). */
7938 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
7940 gen_intermediate_code_internal(env, tb, 0);
/* Public entry point for re-translation during PC search: also fills
 * the gen_opc_* tables (search_pc = 1) so gen_pc_load can recover the
 * guest NIP for a given host PC. */
7943 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
7945 gen_intermediate_code_internal(env, tb, 1);
7948 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7949 unsigned long searched_pc, int pc_pos, void *puc)
7951 env->nip = gen_opc_pc[pc_pos];