/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

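/*
 * VSR numbering convention used throughout this file: VSRs 0-31 overlap the
 * FP registers (high doubleword in cpu_fpr[], low doubleword in cpu_vsr[]),
 * while VSRs 32-63 alias the AltiVec registers (cpu_avrh[]/cpu_avrl[]).
 * cpu_vsrh()/cpu_vsrl() above hide that split from the generators below.
 */
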
#define VSX_LOAD_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

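/*
 * The scalar loads above only fill the upper doubleword of the target VSR
 * (hence the "cpu_vsrl is undefined" note).  The vector forms below move a
 * full 128-bit register as two 64-bit memory accesses.
 */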
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

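/*
 * Helpers for the element-granular loads/stores and for the xxbrh/xxbrw
 * byte-reverse instructions below: swap the bytes within each halfword
 * (gen_bswap16x8) or each word (gen_bswap32x4) of a 128-bit value held as
 * two 64-bit TCG values.
 */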
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

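/*
 * lxvh8x and lxvb16x (ISA 3.0) load a vector of halfwords/bytes.  The data
 * is fetched as two big-endian doublewords; for the halfword form a 16x8
 * byte swap is applied afterwards when the guest runs little-endian.
 */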
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define VSX_VECTOR_LOAD_STORE(name, op, indexed) \
static void gen_##name(DisasContext *ctx) \
{ \
    int xt; \
    TCGv EA; \
    TCGv_i64 xth, xtl; \
 \
    if (indexed) { \
        xt = xT(ctx->opcode); \
    } else { \
        xt = DQxT(ctx->opcode); \
    } \
    xth = cpu_vsrh(xt); \
    xtl = cpu_vsrl(xt); \
 \
    if (xt < 32) { \
        if (unlikely(!ctx->vsx_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VSXU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    if (indexed) { \
        gen_addr_reg_index(ctx, EA); \
    } else { \
        gen_addr_imm_index(ctx, EA, 0x0F); \
    } \
    if (ctx->le_mode) { \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
        tcg_gen_addi_tl(EA, EA, 8); \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
    } else { \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
        tcg_gen_addi_tl(EA, EA, 8); \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
    } \
    tcg_temp_free(EA); \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

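/*
 * lxv/stxv above take a DQ-form displacement (DQxT() target field, low
 * displacement bits masked off with 0x0F), while lxvx/stxvx are the X-form
 * indexed variants of the same 128-bit load/store.
 */
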
#define VSX_LOAD_SCALAR_DS(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32); \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0x03); \
    gen_qemu_##operation(ctx, xth, EA); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA); \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define VSX_STORE_SCALAR_DS(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32); \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0x03); \
    gen_qemu_##operation(ctx, xth, EA); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

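/*
 * The mfvsr/mtvsr family moves data between GPRs and VSRs.  MV_VSRW covers
 * the 32-bit word forms via a pair of TCG conversion ops; the doubleword
 * forms below it are 64-bit only and therefore guarded by TARGET_PPC64.
 */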
#define MV_VSRW(name, tcgop1, tcgop2, target, source) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (xS(ctx->opcode) < 32) { \
        if (unlikely(!ctx->fpu_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    TCGv_i64 tmp = tcg_temp_new_i64(); \
    tcg_gen_##tcgop1(tmp, source); \
    tcg_gen_##tcgop2(target, tmp); \
    tcg_temp_free_i64(tmp); \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (xS(ctx->opcode) < 32) { \
        if (unlikely(!ctx->fpu_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    tcg_gen_mov_i64(target, source); \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

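/*
 * xxpermdi builds the target from one doubleword of each source: the 0x2
 * bit of DM selects the high or low doubleword of VSR[XA] for the result's
 * high half, the 0x1 bit selects from VSR[XB] for the low half.  A scratch
 * copy is needed when XT overlaps one of the sources.
 */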
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

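/*
 * SGN_MASK_DP covers the sign bit of one double-precision value per
 * doubleword; SGN_MASK_SP covers the sign bit of each of the two packed
 * single-precision values.  The abs/nabs/neg/copysign generators below do
 * all of their work with these masks and plain logical ops.
 */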
#define VSX_SCALAR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 xb, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xb = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: \
        tcg_gen_andc_i64(xb, xb, sgm); \
        break; \
    case OP_NABS: \
        tcg_gen_or_i64(xb, xb, sgm); \
        break; \
    case OP_NEG: \
        tcg_gen_xor_i64(xb, xb, sgm); \
        break; \
    case OP_CPSGN: { \
        TCGv_i64 xa = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_and_i64(xa, xa, sgm); \
        tcg_gen_andc_i64(xb, xb, sgm); \
        tcg_gen_or_i64(xb, xb, xa); \
        tcg_temp_free_i64(xa); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
    tcg_temp_free_i64(xb); \
    tcg_temp_free_i64(sgm); \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    int xa; \
    int xt = rD(ctx->opcode) + 32; \
    int xb = rB(ctx->opcode) + 32; \
    TCGv_i64 xah, xbh, xbl, sgm; \
 \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb)); \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb)); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        break; \
    case OP_NABS: \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        break; \
    case OP_NEG: \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        break; \
    case OP_CPSGN: \
        xah = tcg_temp_new_i64(); \
        xa = rA(ctx->opcode) + 32; \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_temp_free_i64(xah); \
        break; \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh); \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(sgm); \
}

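/*
 * The quad-precision forms operate on VSRs 32-63 (hence the +32 register
 * bias) and only touch the sign bit in the most-significant doubleword;
 * the low doubleword of the source is passed through unchanged.
 */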
VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 xbh, xbl, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        break; \
    case OP_NABS: \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbl, xbl, sgm); \
        break; \
    case OP_NEG: \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        tcg_gen_xor_i64(xbl, xbl, sgm); \
        break; \
    case OP_CPSGN: { \
        TCGv_i64 xah = tcg_temp_new_i64(); \
        TCGv_i64 xal = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_and_i64(xah, xah, sgm); \
        tcg_gen_and_i64(xal, xal, sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_gen_or_i64(xbl, xbl, xal); \
        tcg_temp_free_i64(xah); \
        tcg_temp_free_i64(xal); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(sgm); \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

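/*
 * The bulk of the VSX arithmetic is done out of line: GEN_VSX_HELPER_2 only
 * checks that VSX is enabled and hands the raw opcode to the corresponding
 * helper, which decodes the register fields itself.  GEN_VSX_HELPER_XT_XB_ENV
 * is used for the few helpers that take and return 64-bit values directly.
 */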
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx) \
{ \
    TCGv_i32 opc; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    opc = tcg_const_i32(ctx->opcode); \
    gen_helper_##name(cpu_env, opc); \
    tcg_temp_free_i32(opc); \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env, \
                      cpu_vsrh(xB(ctx->opcode))); \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

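/*
 * xxbrd/xxbrh/xxbrq/xxbrw (ISA 3.0) byte-reverse each element of VSR[XB].
 * They are implemented inline, reusing the bswap helpers defined earlier
 * for the halfword and word element sizes.
 */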
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
           cpu_vsrh(xB(ctx->opcode))); \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
           cpu_vsrl(xB(ctx->opcode))); \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

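/*
 * xxmrghw/xxmrglw interleave the words of the high (or low) doublewords of
 * VSR[XA] and VSR[XB]: the result is { A.w[0], B.w[0], A.w[1], B.w[1] } for
 * the high form, and the same pattern over words 2 and 3 for the low form.
 */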
#define VSX_XXMRG(name, high) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 a0, a1, b0, b1; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    a0 = tcg_temp_new_i64(); \
    a1 = tcg_temp_new_i64(); \
    b0 = tcg_temp_new_i64(); \
    b1 = tcg_temp_new_i64(); \
    if (high) { \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
    } else { \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
    } \
    tcg_gen_shri_i64(a0, a0, 32); \
    tcg_gen_shri_i64(b0, b0, 32); \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
                        b0, a0, 32, 32); \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
                        b1, a1, 32, 32); \
    tcg_temp_free_i64(a0); \
    tcg_temp_free_i64(a1); \
    tcg_temp_free_i64(b0); \
    tcg_temp_free_i64(b1); \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

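/*
 * xxspltw replicates word UIM of VSR[XB] into all four word slots of
 * VSR[XT].  The 0x2 bit of UIM picks the source doubleword, the 0x1 bit
 * picks the word within it; the chosen word is then duplicated across
 * both halves of the target.
 */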
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

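/*
 * pattern() broadcasts a byte to all eight byte lanes of a 64-bit value:
 * multiplying by (~0ull / 0xff) == 0x0101010101010101 replicates the byte.
 * xxspltib uses it to splat its 8-bit immediate across the whole VSR.
 */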
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

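/*
 * xxsldwi concatenates VSR[XA]:VSR[XB] and extracts 16 bytes starting at
 * word offset SHW (0-3).  SHW 0 and 2 fall on doubleword boundaries and are
 * plain moves; SHW 1 and 3 need shift/or pairs to stitch adjacent words.
 */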
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM