/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n - 32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n - 32];
    }
}

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

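/*
 * lxvh8x: load eight halfwords; in little-endian mode the bytes of each
 * halfword are swapped after loading.
 */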
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define MV_VSRW(name, tcgop1, tcgop2, target, source)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (xS(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->fpu_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_FPU);             \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    TCGv_i64 tmp = tcg_temp_new_i64();                        \
    tcg_gen_##tcgop1(tmp, source);                            \
    tcg_gen_##tcgop2(target, tmp);                            \
    tcg_temp_free_i64(tmp);                                   \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)],
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)),
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)),
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (xS(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->fpu_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_FPU);             \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    tcg_gen_mov_i64(target, source);                          \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xb, sgm;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xb = tcg_temp_new_i64();                                  \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xb, xb, sgm);                          \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xb, xb, sgm);                         \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xa = tcg_temp_new_i64();                     \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_and_i64(xa, xa, sgm);                         \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        tcg_gen_or_i64(xb, xb, xa);                           \
        tcg_temp_free_i64(xa);                                \
        break;                                                \
    }                                                         \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
    tcg_temp_free_i64(xb);                                    \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xbh, xbl, sgm;                                   \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));          \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));          \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        tcg_gen_or_i64(xbl, xbl, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        tcg_gen_xor_i64(xbl, xbl, sgm);                       \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xah = tcg_temp_new_i64();                    \
        TCGv_i64 xal = tcg_temp_new_i64();                    \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));      \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));      \
        tcg_gen_and_i64(xah, xah, sgm);                       \
        tcg_gen_and_i64(xal, xal, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_gen_or_i64(xbl, xbl, xal);                        \
        tcg_temp_free_i64(xah);                               \
        tcg_temp_free_i64(xal);                               \
        break;                                                \
    }                                                         \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);          \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);          \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    /* Allocate the temp only after the facility check so it cannot leak. */
    t0 = tcg_temp_new_i64();
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op)                                \
static void glue(gen_, name)(DisasContext *ctx)                  \
{                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                           \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
        return;                                                  \
    }                                                            \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
           cpu_vsrh(xB(ctx->opcode)));                           \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
           cpu_vsrl(xB(ctx->opcode)));                           \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 a0, a1, b0, b1;                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    a0 = tcg_temp_new_i64();                                  \
    a1 = tcg_temp_new_i64();                                  \
    b0 = tcg_temp_new_i64();                                  \
    b1 = tcg_temp_new_i64();                                  \
    if (high) {                                               \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));       \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));       \
    } else {                                                  \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));       \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));       \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));       \
    }                                                         \
    tcg_gen_shri_i64(a0, a0, 32);                             \
    tcg_gen_shri_i64(b0, b0, 32);                             \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),            \
                        b0, a0, 32, 32);                      \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),            \
                        b1, a1, 32, 32);                      \
    tcg_temp_free_i64(a0);                                    \
    tcg_temp_free_i64(a1);                                    \
    tcg_temp_free_i64(b0);                                    \
    tcg_temp_free_i64(b1);                                    \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    /* The target register selects the facility: VSX for VSRs 0-31,
     * Altivec for VSRs 32-63, matching mtvsrdd/mtvsrws above. */
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL