/***                           VSX extension                              ***/

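/*
 * Note on register layout (as implied by the helpers below): each 128-bit
 * VSR is handled as a high/low pair of 64-bit TCG values.  VSRs 32-63
 * alias the Altivec vector registers, while VSRs 0-31 overlay the scalar
 * floating-point registers in their high doubleword.
 */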
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

#define VSX_LOAD_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

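/*
 * lxvw4x loads four 32-bit words.  In little-endian mode each doubleword
 * is loaded as LE and its two words are then swapped with a shift/deposit
 * pair; in big-endian mode two plain BE doubleword loads suffice.
 */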
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

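/*
 * Byte-swap each of the eight 16-bit halfwords held in the inh:inl pair,
 * using the usual mask/shift/or idiom on the two 64-bit halves.
 */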
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

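/*
 * lxvh8x loads eight halfwords: two big-endian doubleword loads, with a
 * halfword byte swap of the whole vector when the guest is little-endian.
 */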
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

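/*
 * lxvb16x loads sixteen bytes; the vector byte order matches storage byte
 * order, so two big-endian doubleword loads give the right layout with no
 * extra shuffling.
 */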
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

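/*
 * VSX_STORE_SCALAR mirrors VSX_LOAD_SCALAR: it stores the high doubleword
 * of VSR[XS] using the given store helper.
 */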
#define VSX_STORE_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA); \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

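/*
 * MV_VSRW generates the 32-bit GPR<->VSR moves.  The facility check
 * depends on which half of the VSR file is addressed: FP for VSRs 0-31,
 * Altivec for VSRs 32-63.
 */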
#define MV_VSRW(name, tcgop1, tcgop2, target, source) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (xS(ctx->opcode) < 32) { \
        if (unlikely(!ctx->fpu_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    TCGv_i64 tmp = tcg_temp_new_i64(); \
    tcg_gen_##tcgop1(tmp, source); \
    tcg_gen_##tcgop2(target, tmp); \
    tcg_temp_free_i64(tmp); \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

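/* The 64-bit GPR<->VSR moves only exist on 64-bit targets. */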
#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (xS(ctx->opcode) < 32) { \
        if (unlikely(!ctx->fpu_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    tcg_gen_mov_i64(target, source); \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

#endif

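/*
 * xxpermdi selects one doubleword from VSR[XA] and one from VSR[XB]
 * according to the two DM bits.  Temporaries are only needed when the
 * target overlaps a source.
 */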
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

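/* Sign-bit masks: one bit per doubleword (DP) or per word (SP). */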
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 xb, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xb = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: { \
        tcg_gen_andc_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_NABS: { \
        tcg_gen_or_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_NEG: { \
        tcg_gen_xor_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_CPSGN: { \
        TCGv_i64 xa = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_and_i64(xa, xa, sgm); \
        tcg_gen_andc_i64(xb, xb, sgm); \
        tcg_gen_or_i64(xb, xb, xa); \
        tcg_temp_free_i64(xa); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
    tcg_temp_free_i64(xb); \
    tcg_temp_free_i64(sgm); \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

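/*
 * VSX_VECTOR_MOVE applies the same sign-bit manipulations as
 * VSX_SCALAR_MOVE, but to both doublewords of the vector.
 */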
#define VSX_VECTOR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 xbh, xbl, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: { \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_NABS: { \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_NEG: { \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        tcg_gen_xor_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_CPSGN: { \
        TCGv_i64 xah = tcg_temp_new_i64(); \
        TCGv_i64 xal = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_and_i64(xah, xah, sgm); \
        tcg_gen_and_i64(xal, xal, sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_gen_or_i64(xbl, xbl, xal); \
        tcg_temp_free_i64(xah); \
        tcg_temp_free_i64(xal); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(sgm); \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

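/*
 * The arithmetic, compare and convert instructions below are implemented
 * by out-of-line helpers; GEN_VSX_HELPER_2 simply passes the raw opcode
 * to the helper, which decodes the register fields itself.
 */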
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx) \
{ \
    TCGv_i32 opc; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    opc = tcg_const_i32(ctx->opcode); \
    gen_helper_##name(cpu_env, opc); \
    tcg_temp_free_i32(opc); \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env, \
                      cpu_vsrh(xB(ctx->opcode))); \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

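/*
 * 128-bit logical operations are done as two independent 64-bit TCG ops,
 * one per VSR half.
 */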
#define VSX_LOGICAL(name, tcg_op) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
           cpu_vsrh(xB(ctx->opcode))); \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
           cpu_vsrl(xB(ctx->opcode))); \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

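/*
 * xxmrghw/xxmrglw interleave the words of the chosen halves of VSR[XA]
 * and VSR[XB] using shift and deposit.
 */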
#define VSX_XXMRG(name, high) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 a0, a1, b0, b1; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    a0 = tcg_temp_new_i64(); \
    a1 = tcg_temp_new_i64(); \
    b0 = tcg_temp_new_i64(); \
    b1 = tcg_temp_new_i64(); \
    if (high) { \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
    } else { \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
    } \
    tcg_gen_shri_i64(a0, a0, 32); \
    tcg_gen_shri_i64(b0, b0, 32); \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
                        b0, a0, 32, 32); \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
                        b1, a1, 32, 32); \
    tcg_temp_free_i64(a0); \
    tcg_temp_free_i64(a1); \
    tcg_temp_free_i64(b0); \
    tcg_temp_free_i64(b1); \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

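/* xxsel: bitwise select, T = (A & ~C) | (B & C), applied to both halves. */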
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

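/*
 * xxspltw replicates the word selected by UIM across all four word
 * elements of VSR[XT].
 */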
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

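/*
 * pattern() replicates a byte value into every byte of a 64-bit word;
 * xxspltib uses it to splat an 8-bit immediate across the whole vector.
 */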
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    } else {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

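/*
 * xxsldwi concatenates VSR[XA]:VSR[XB] and extracts a 128-bit window
 * starting SHW words in; each shift amount is expanded as its own case.
 */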
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM