/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

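/*
 * VSRs 0-31 share their high doubleword with the FPRs (cpu_fpr holds the
 * high half, cpu_vsr the low half), while VSRs 32-63 alias the Altivec
 * VRs (cpu_avrh/cpu_avrl); the helpers above pick the backing array.
 */
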
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

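/*
 * lxvw4x: in little-endian mode an 8-byte load returns the doubleword
 * fully byte-reversed, i.e. the two words are swapped as well as
 * byte-swapped; the shri/deposit pair swaps the words back so the
 * register ends up with the element layout the instruction defines.
 */
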
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

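/*
 * 128-bit byte-swap helpers operating on a (high, low) pair of 64-bit
 * temporaries.  gen_bswap16x8 byte-swaps each of the eight halfwords
 * with the classic mask trick: ((x & mask) << 8) | ((x >> 8) & mask)
 * for mask = 0x00FF00FF00FF00FF.  gen_bswap32x4 byte-swaps each of the
 * four words: bswap64 also swaps the two words of each doubleword, so a
 * shift + deposit swaps them back.
 */
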
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

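/*
 * lxvb16x loads sixteen individual bytes; a byte array has the same
 * memory image in both endiannesses, so two big-endian doubleword loads
 * need no little-endian fixup.
 */
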
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

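/*
 * lxv/stxv (DQ-form) and lxvx/stxvx (indexed) reach all 64 VSRs; per
 * ISA 3.0 access to VSRs 0-31 is gated on the VSX facility and access
 * to VSRs 32-63 (the VRs) on the vector facility, hence the split
 * permission check.
 */
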
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)              \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth, xtl;                                        \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
    xth = cpu_vsrh(xt);                                       \
    xtl = cpu_vsrl(xt);                                       \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
    }                                                         \
    tcg_temp_free(EA);                                        \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

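/*
 * lxsd/lxssp are DS-form loads whose target is a VR, i.e. VSR 32-63,
 * hence rD() + 32 and the Altivec check.
 */
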
#define VSX_LOAD_SCALAR_DS(name, operation)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);            \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)
VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

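/*
 * stxsd/stxssp mirror lxsd/lxssp: DS-form with a VR operand (hence
 * rD() + 32 and the Altivec check), storing the high doubleword.
 */
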
#define VSX_STORE_SCALAR_DS(name, operation)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);            \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

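/*
 * GPR <-> VSR word moves.  tcgop1 widens the source into a 64-bit
 * temporary, tcgop2 writes the target with the required zero or sign
 * extension.  The facility check matches the register half touched:
 * FP below 32, Altivec from 32 up.
 */
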
#define MV_VSRW(name, tcgop1, tcgop2, target, source)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (xS(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->fpu_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_FPU);             \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    TCGv_i64 tmp = tcg_temp_new_i64();                        \
    tcg_gen_##tcgop1(tmp, source);                            \
    tcg_gen_##tcgop2(target, tmp);                            \
    tcg_temp_free_i64(tmp);                                   \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (xS(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->fpu_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_FPU);             \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    tcg_gen_mov_i64(target, source);                          \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

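/* For mtvsrdd, rA == 0 supplies a zero high doubleword rather than GPR 0. */
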
static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

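/*
 * mtvsrws splats the low word of GPR[rA] into all four words of the
 * target: deposit doubles it within one doubleword, the mov copies it
 * into the other.
 */
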
static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

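/*
 * xxpermdi: DM bit 1 selects which doubleword of xA becomes the target's
 * high half, DM bit 0 which doubleword of xB becomes the low half.  When
 * xT overlaps xA or xB, both inputs are read into temporaries first so
 * the first write cannot clobber a source.
 */
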
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xb, sgm;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xb = tcg_temp_new_i64();                                  \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xb, xb, sgm);                          \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xb, xb, sgm);                         \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xa = tcg_temp_new_i64();                     \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_and_i64(xa, xa, sgm);                         \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        tcg_gen_or_i64(xb, xb, xa);                           \
        tcg_temp_free_i64(xa);                                \
        break;                                                \
    }                                                         \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
    tcg_temp_free_i64(xb);                                    \
    tcg_temp_free_i64(sgm);                                   \
}

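/*
 * VSX_SCALAR_MOVE (and the QP/vector variants below) are pure sign-bit
 * games: ABS clears the sign (andc), NABS sets it (or), NEG flips it
 * (xor), CPSGN copies xA's sign into xB's value.  SGN_MASK_SP carries
 * one sign bit per 32-bit lane for the single-precision forms.
 */
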
VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    int xa;                                                   \
    int xt = rD(ctx->opcode) + 32;                            \
    int xb = rB(ctx->opcode) + 32;                            \
    TCGv_i64 xah, xbh, xbl, sgm;                              \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb));                       \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb));                       \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        break;                                                \
    case OP_CPSGN:                                            \
        xah = tcg_temp_new_i64();                             \
        xa = rA(ctx->opcode) + 32;                            \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm);              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_temp_free_i64(xah);                               \
        break;                                                \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh);                       \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl);                       \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xbh, xbl, sgm;                                   \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));          \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));          \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        tcg_gen_or_i64(xbl, xbl, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        tcg_gen_xor_i64(xbl, xbl, sgm);                       \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xah = tcg_temp_new_i64();                    \
        TCGv_i64 xal = tcg_temp_new_i64();                    \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));      \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));      \
        tcg_gen_and_i64(xah, xah, sgm);                       \
        tcg_gen_and_i64(xal, xal, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_gen_or_i64(xbl, xbl, xal);                        \
        tcg_temp_free_i64(xah);                               \
        tcg_temp_free_i64(xal);                               \
        break;                                                \
    }                                                         \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);          \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);          \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

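/*
 * Note that the generated bodies ignore the op1/op2/inval/type macro
 * arguments; they only record the opcode fields and the required ISA
 * flag for the instantiations below.
 */
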
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

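/*
 * xxbrh/xxbrw/xxbrd byte-reverse each halfword, word or doubleword of
 * VSR[XB], reusing the bswap helpers above; xxbrq reverses the full
 * quadword, going through a temporary so that xT may alias xB.
 */
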
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op)                             \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
           cpu_vsrh(xB(ctx->opcode)));                        \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
           cpu_vsrl(xB(ctx->opcode)));                        \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

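/*
 * xxmrghw/xxmrglw interleave the word pairs of the high (or low)
 * doublewords of xA and xB, producing { a0, b0, a1, b1 }.
 */
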
#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 a0, a1, b0, b1;                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    a0 = tcg_temp_new_i64();                                  \
    a1 = tcg_temp_new_i64();                                  \
    b0 = tcg_temp_new_i64();                                  \
    b1 = tcg_temp_new_i64();                                  \
    if (high) {                                               \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));       \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));       \
    } else {                                                  \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));       \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));       \
    }                                                         \
    tcg_gen_shri_i64(a0, a0, 32);                             \
    tcg_gen_shri_i64(b0, b0, 32);                             \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),            \
                        b0, a0, 32, 32);                      \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),            \
                        b1, a1, 32, 32);                      \
    tcg_temp_free_i64(a0);                                    \
    tcg_temp_free_i64(a1);                                    \
    tcg_temp_free_i64(b0);                                    \
    tcg_temp_free_i64(b1);                                    \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

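/*
 * xxsel computes (xA & ~xC) | (xB & xC): each result bit comes from xB
 * where the mask xC is set, from xA where it is clear.
 */
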
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

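/*
 * pattern(x) broadcasts byte x across a 64-bit value: ~0ULL / 0xff is
 * 0x0101010101010101, so the multiply replicates the byte into every
 * lane.
 */
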
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

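/*
 * xxsldwi shifts the 256-bit concatenation xA:xB left by SHW words and
 * keeps the uppermost 128 bits; the switch below handles the four
 * word-aligned shift amounts.
 */
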
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL