target-ppc: Implement mfvsrld instruction
/***                           VSX extension                               ***/

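/*
 * VSR register file layout: VSRs 0-31 overlay the FPRs in their high
 * doubleword, with the low doubleword held separately in cpu_vsr[];
 * VSRs 32-63 overlay the Altivec VRs (cpu_avrh/cpu_avrl). The two
 * helpers below return the TCG global backing each 64-bit half.
 */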
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

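/*
 * Generate a scalar VSX load: compute the effective address from the
 * RA/RB index form, then load into the high doubleword of VSR[XT]
 * using the given gen_qemu_* load op.
 */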
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl(xT) is left undefined */                \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

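/* lxvd2x: load two doublewords into the high and low halves of VSR[XT]. */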
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

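/* lxvdsx: load one doubleword and splat it into both halves of VSR[XT]. */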
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

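/*
 * lxvw4x: load four words; each pair of 32-bit loads is packed into one
 * 64-bit half of VSR[XT] with deposit, the first word of the pair going
 * into bits 63:32.
 */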
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 tmp;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    tmp = tcg_temp_new_i64();

    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xth, EA);
    tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);

    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xtl, EA);
    tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

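/* Generate a scalar VSX store from the high doubleword of VSR[XS]. */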
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

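/*
 * stxvw4x: store four words, extracting the upper word of each
 * doubleword with a 32-bit right shift before each 32-bit store.
 */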
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 tmp;
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tmp = tcg_temp_new_i64();

    tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);

    tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

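/*
 * Word moves between a GPR and a VSR. The exception raised when the
 * facility is disabled depends on which half of the VSR space is
 * addressed: FP unavailable for VSRs 0-31, vector unavailable for
 * VSRs 32-63.
 */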
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
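/* Doubleword moves between a GPR and the high doubleword of a VSR
 * (64-bit targets only). */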
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

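/* mfvsrld (ISA 3.0): copy the low doubleword of VSR[XS] into GPR[RA]. */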
static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

#endif

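/*
 * xxpermdi: the two-bit DM field selects which doubleword of VSR[XA]
 * and VSR[XB] form the high and low halves of VSR[XT]. Temporaries are
 * needed when XT aliases a source so that a half is not clobbered
 * before it is read.
 */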
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

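/*
 * Scalar sign-bit manipulation on the high doubleword: abs clears the
 * sign bit (andc), nabs sets it (or), neg flips it (xor), and cpsgn
 * copies it from VSR[XA].
 */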
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

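/*
 * Vector forms of the sign-bit operations above, applied to both
 * doublewords. SGN_MASK_SP covers the sign bit of each 32-bit word, so
 * one 64-bit op handles two single-precision elements at once.
 */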
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

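/*
 * Boilerplate for instructions implemented by a C helper: the raw
 * opcode is passed so the helper can decode the register fields itself.
 */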
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

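/* 128-bit logical ops, emitted as one 64-bit TCG op per VSR half. */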
#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
            cpu_vsrh(xB(ctx->opcode)));                              \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
            cpu_vsrl(xB(ctx->opcode)));                              \
    }

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

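/*
 * xxmrghw/xxmrglw: interleave the words of one half of VSR[XA] and
 * VSR[XB]. The source halves are copied to temporaries first, since
 * XT may alias XA or XB.
 */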
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

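/* xxsel: bitwise select, XT = (XB & XC) | (XA & ~XC) on each half. */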
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

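/*
 * xxspltw: UIM selects one of the four words of VSR[XB] (bit 1 picks
 * the doubleword, bit 0 the word within it) and replicates it across
 * all four word positions of VSR[XT].
 */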
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

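/*
 * pattern() replicates a byte across all eight byte lanes of a
 * uint64_t, e.g. pattern(0xAB) == 0xABABABABABABABABull. xxspltib
 * (ISA 3.0) splats the 8-bit immediate across VSR[XT].
 */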
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

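/*
 * xxsldwi: shift left double by word immediate. SHW selects how many
 * words the concatenation of VSR[XA] and VSR[XB] is shifted; the odd
 * cases (1 and 3) splice adjacent doublewords with shifts and an or.
 */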
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL