/***                           VSX extension                              ***/

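/*
 * VSR register file layout: the high doubleword of VSRs 0-31 aliases the
 * FPRs and the low doubleword lives in its own array; VSRs 32-63 alias the
 * high/low halves of the Altivec VRs. The exact TCG global names used below
 * (cpu_fpr, cpu_vsr, cpu_avrh, cpu_avrl) follow the usual translate.c
 * declarations for this mapping.
 */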
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

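/*
 * VSX_LOAD_SCALAR generates a gen_<name>() translator for the scalar load
 * forms (lxsdx, lxsiwax, ...): check MSR[VSX], compute the effective
 * address, and load into the high doubleword of VSR[XT].
 */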
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

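/*
 * lxvd2x loads two doublewords into VSR[XT]; lxvdsx loads one doubleword
 * and splats it into both halves of VSR[XT].
 */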
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

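/*
 * lxvw4x loads four words; each pair of words is combined into one
 * doubleword of VSR[XT] with a 32-bit deposit.
 */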
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 tmp;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    tmp = tcg_temp_new_i64();

    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xth, EA);
    tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);

    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xtl, EA);
    tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

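/*
 * VSX_STORE_SCALAR mirrors VSX_LOAD_SCALAR for the scalar store forms:
 * the high doubleword of VSR[XS] is stored to the computed EA.
 */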
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

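/*
 * stxvw4x stores the four words of VSR[XS]; each doubleword is split back
 * into its high and low 32-bit halves before the word stores.
 */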
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 tmp;
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tmp = tcg_temp_new_i64();

    tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);

    tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

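/*
 * MV_VSRW generates the word-sized GPR<->VSR move forms (mfvsrwz, mtvsrwa,
 * mtvsrwz). VSRs 0-31 alias the FPRs, so those encodings require the FPU;
 * VSRs 32-63 alias the Altivec VRs and require Altivec.
 */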
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

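/*
 * Doubleword GPR<->VSR moves only exist on 64-bit implementations, hence
 * the TARGET_PPC64 guard around MV_VSRD and mfvsrld.
 */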
#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

#endif

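/*
 * xxpermdi selects one doubleword from VSR[XA] and one from VSR[XB]
 * according to the DM field. Temporaries are only needed when XT overlaps
 * one of the source registers.
 */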
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

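/*
 * VSX_SCALAR_MOVE covers the sign-bit manipulations on the scalar
 * doubleword (abs, nabs, neg, copy-sign), using the DP sign mask.
 */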
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

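/*
 * VSX_VECTOR_MOVE is the vector counterpart: the same sign-bit operation is
 * applied to both doublewords, with SGN_MASK_SP covering the two
 * single-precision sign bits in each doubleword.
 */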
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

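/*
 * Most VSX arithmetic is done out of line: GEN_VSX_HELPER_2 emits a call to
 * helper_<name>(env, opcode), while GEN_VSX_HELPER_XT_XB_ENV passes the XT
 * and XB doublewords to the helper directly.
 */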
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

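/*
 * The xxl* logical operations and the permute/merge/select forms below are
 * implemented inline with TCG ops rather than helper calls.
 */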
#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
            cpu_vsrh(xB(ctx->opcode)));                              \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
            cpu_vsrl(xB(ctx->opcode)));                              \
    }

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

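/*
 * xxmrghw/xxmrglw interleave the words of VSR[XA] and VSR[XB]; the "high"
 * macro argument selects which source doubleword feeds the merge.
 */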
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

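/*
 * xxsel is a bit-wise select: for each bit, VSR[XC] chooses between the
 * corresponding bit of VSR[XA] (mask clear) and VSR[XB] (mask set).
 */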
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

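/*
 * xxspltw replicates one word of VSR[XB], selected by UIM, into all four
 * word slots of VSR[XT].
 */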
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

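/*
 * xxspltib splats the 8-bit immediate into every byte of the target;
 * pattern() expands a byte into the 0x0101...01 multiple.
 */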
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    } else {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

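/*
 * xxsldwi concatenates VSR[XA]:VSR[XB] and extracts a 128-bit result
 * starting SHW words in; each SHW value gets its own shift/or sequence.
 */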
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM