target/ppc/translate/vsx-impl.inc.c
/***                           VSX extension                               ***/

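/*
 * A 128-bit VSR is modelled as two 64-bit halves.  For VSRs 0-31 the
 * high doubleword is the overlapping FP register and the low doubleword
 * lives in the separate cpu_vsr array; VSRs 32-63 are the Altivec
 * registers, split across cpu_avrh/cpu_avrl.
 */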
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n - 32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n - 32];
    }
}

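/*
 * Indexed scalar loads: the value lands in the high doubleword of XT.
 * The ISA leaves the low doubleword undefined, so it is not written.
 */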
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

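/* lxvd2x: load the two doublewords of XT from EA and EA + 8. */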
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

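/* lxvdsx: load one doubleword and splat it into both halves of XT. */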
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

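/*
 * lxvw4x: load four words.  In little-endian mode each doubleword is
 * loaded LE and its two words are then swapped, which produces the
 * correct element order without four separate word loads.
 */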
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

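/*
 * Byte-swap each of the eight halfwords in {inh, inl} with the usual
 * mask-and-shift idiom:
 *   out = ((in & 0x00ff...) << 8) | ((in >> 8) & 0x00ff...)
 */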
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

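/*
 * Byte-swap each of the four words in {inh, inl}: bswap64 each half,
 * then swap the two words inside it to restore their positions.
 */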
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

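/* lxvh8x: load eight halfwords; in LE mode byte-swap each halfword. */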
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

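/* lxvb16x: load sixteen bytes; byte elements need no endian fixup. */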
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

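/*
 * lxv/stxv (DQ-form) and lxvx/stxvx (X-form), ISA 3.0.  Any of the 64
 * VSRs can be named, so the facility check depends on which half of
 * the register file is addressed.
 */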
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth, xtl;                                      \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
    xth = cpu_vsrh(xt);                                     \
    xtl = cpu_vsrl(xt);                                     \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

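/*
 * DS-form scalar loads (lxsd, lxssp), ISA 3.0.  The target is always a
 * VR (VSR 32-63), hence the Altivec facility check.
 */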
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

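/* Indexed scalar stores: the high doubleword of XS goes to memory. */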
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

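/*
 * stxvw4x: store four words; in little-endian mode the two words of
 * each doubleword are swapped before an LE store (mirror of lxvw4x).
 */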
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

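/* stxvh8x and stxvb16x below are the store mirrors of lxvh8x/lxvb16x. */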
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

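/* DS-form scalar stores (stxsd, stxssp), ISA 3.0; VR operands only. */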
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    tcg_temp_free(EA);                                            \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

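/*
 * Word moves between a GPR and the high doubleword of a VSR.  VSRs
 * 0-31 are backed by the FP unit and VSRs 32-63 by the vector unit,
 * so the facility check depends on the register number.
 */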
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

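/*
 * ISA 3.0 doubleword moves: mfvsrld reads the low doubleword of a VSR,
 * mtvsrdd assembles a VSR from two GPRs (rA = 0 zeroes the high half),
 * and mtvsrws splats the low word of rA across all four words.
 */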
static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

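/*
 * xxpermdi: the two DM bits select which doubleword of xA and xB feed
 * the halves of xT.  When xT aliases a source, go through temporaries
 * so that the first write cannot clobber an input.
 */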
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

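/*
 * Sign-bit operations on the scalar (high) doubleword: abs clears the
 * sign, nabs sets it, neg flips it, cpsgn copies it from xA.
 */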
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xb, sgm;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xb = tcg_temp_new_i64();                                      \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));               \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xb, xb, sgm);                              \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xb, xb, sgm);                             \
        break;                                                    \
    case OP_CPSGN: {                                              \
        TCGv_i64 xa = tcg_temp_new_i64();                         \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));           \
        tcg_gen_and_i64(xa, xa, sgm);                             \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        tcg_gen_or_i64(xb, xb, xa);                               \
        tcg_temp_free_i64(xa);                                    \
        break;                                                    \
    }                                                             \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);               \
    tcg_temp_free_i64(xb);                                        \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

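/*
 * Quad-precision variants (ISA 3.0) take VR operands (register number
 * + 32).  Only the high doubleword carries the sign bit; the low
 * doubleword passes through unchanged.
 */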
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm;                                  \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb));                           \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm);                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh);                           \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl);                           \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

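/* Vector forms apply the same sign-bit operation to both doublewords. */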
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
{                                                                \
    TCGv_i64 xbh, xbl, sgm;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                           \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
        return;                                                  \
    }                                                            \
    xbh = tcg_temp_new_i64();                                    \
    xbl = tcg_temp_new_i64();                                    \
    sgm = tcg_temp_new_i64();                                    \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));             \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));             \
    tcg_gen_movi_i64(sgm, sgn_mask);                             \
    switch (op) {                                                \
    case OP_ABS:                                                 \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        break;                                                   \
    case OP_NABS:                                                \
        tcg_gen_or_i64(xbh, xbh, sgm);                           \
        tcg_gen_or_i64(xbl, xbl, sgm);                           \
        break;                                                   \
    case OP_NEG:                                                 \
        tcg_gen_xor_i64(xbh, xbh, sgm);                          \
        tcg_gen_xor_i64(xbl, xbl, sgm);                          \
        break;                                                   \
    case OP_CPSGN: {                                             \
        TCGv_i64 xah = tcg_temp_new_i64();                       \
        TCGv_i64 xal = tcg_temp_new_i64();                       \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));         \
        tcg_gen_and_i64(xah, xah, sgm);                          \
        tcg_gen_and_i64(xal, xal, sgm);                          \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        tcg_gen_or_i64(xbh, xbh, xah);                           \
        tcg_gen_or_i64(xbl, xbl, xal);                           \
        tcg_temp_free_i64(xah);                                  \
        tcg_temp_free_i64(xal);                                  \
        break;                                                   \
    }                                                            \
    }                                                            \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);             \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);             \
    tcg_temp_free_i64(xbh);                                      \
    tcg_temp_free_i64(xbl);                                      \
    tcg_temp_free_i64(sgm);                                      \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

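/*
 * Most VSX arithmetic is implemented in out-of-line C helpers; these
 * generators only check the facility and hand the helper either the
 * raw opcode or the operand VSR halves.
 */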
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

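/* xxbrd/xxbrh/xxbrw/xxbrq: ISA 3.0 byte-reverse by element size. */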
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

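/* 128-bit bitwise logical operations, done as two 64-bit TCG ops. */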
#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext *ctx)                      \
{                                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                               \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                       \
        return;                                                      \
    }                                                                \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)),     \
           cpu_vsrh(xB(ctx->opcode)));                               \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)),     \
           cpu_vsrl(xB(ctx->opcode)));                               \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

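/*
 * xxmrghw/xxmrglw: interleave the two words of the high (or low)
 * doublewords of xA and xB, giving xT = { a.w0, b.w0, a.w1, b.w1 }.
 */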
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
{                                                           \
    TCGv_i64 a0, a1, b0, b1;                                \
    if (unlikely(!ctx->vsx_enabled)) {                      \
        gen_exception(ctx, POWERPC_EXCP_VSXU);              \
        return;                                             \
    }                                                       \
    a0 = tcg_temp_new_i64();                                \
    a1 = tcg_temp_new_i64();                                \
    b0 = tcg_temp_new_i64();                                \
    b1 = tcg_temp_new_i64();                                \
    if (high) {                                             \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));     \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));     \
    } else {                                                \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));     \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));     \
    }                                                       \
    tcg_gen_shri_i64(a0, a0, 32);                           \
    tcg_gen_shri_i64(b0, b0, 32);                           \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),          \
                        b0, a0, 32, 32);                    \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),          \
                        b1, a1, 32, 32);                    \
    tcg_temp_free_i64(a0);                                  \
    tcg_temp_free_i64(a1);                                  \
    tcg_temp_free_i64(b0);                                  \
    tcg_temp_free_i64(b1);                                  \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

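/* xxsel: bitwise select, xT = (xA & ~xC) | (xB & xC). */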
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

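/* xxspltw: splat word UIM of xB across all four words of xT. */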
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

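/* Replicate the low byte of x into all eight bytes of a doubleword. */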
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

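/* xxspltib (ISA 3.0): splat an 8-bit immediate across all 16 bytes. */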
static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

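/*
 * xxsldwi: shift left double by word immediate; xT gets four
 * consecutive words of the concatenation xA:xB starting at word SHW.
 */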
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0:
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2:
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL