1 | /* | |
2 | * PowerPC emulation for qemu: main translation routines. | |
3 | * | |
4 | * Copyright (c) 2003-2007 Jocelyn Mayer | |
5 | * | |
6 | * This library is free software; you can redistribute it and/or | |
7 | * modify it under the terms of the GNU Lesser General Public | |
8 | * License as published by the Free Software Foundation; either | |
9 | * version 2 of the License, or (at your option) any later version. | |
10 | * | |
11 | * This library is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
14 | * Lesser General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU Lesser General Public | |
17 | * License along with this library; if not, write to the Free Software | |
18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA | |
19 | */ | |
20 | #include <stdarg.h> | |
21 | #include <stdlib.h> | |
22 | #include <stdio.h> | |
23 | #include <string.h> | |
24 | #include <inttypes.h> | |
25 | ||
26 | #include "cpu.h" | |
27 | #include "exec-all.h" | |
28 | #include "disas.h" | |
29 | #include "tcg-op.h" | |
30 | #include "qemu-common.h" | |
31 | ||
32 | #include "helper.h" | |
33 | #define GEN_HELPER 1 | |
34 | #include "helper.h" | |
35 | ||
36 | #define CPU_SINGLE_STEP 0x1 | |
37 | #define CPU_BRANCH_STEP 0x2 | |
38 | #define GDBSTUB_SINGLE_STEP 0x4 | |
39 | ||
40 | /* Include definitions for instruction classes and implementation flags */ | |
41 | //#define DO_SINGLE_STEP | |
42 | //#define PPC_DEBUG_DISAS | |
43 | //#define DO_PPC_STATISTICS | |
44 | ||
45 | #ifdef PPC_DEBUG_DISAS | |
46 | # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) | |
47 | #else | |
48 | # define LOG_DISAS(...) do { } while (0) | |
49 | #endif | |
50 | /*****************************************************************************/ | |
51 | /* Code translation helpers */ | |
52 | ||
53 | /* global register indexes */ | |
54 | static TCGv_ptr cpu_env; | |
55 | static char cpu_reg_names[10*3 + 22*4 /* GPR */ | |
56 | #if !defined(TARGET_PPC64) | |
57 | + 10*4 + 22*5 /* SPE GPRh */ | |
58 | #endif | |
59 | + 10*4 + 22*5 /* FPR */ | |
60 | + 2*(10*6 + 22*7) /* AVRh, AVRl */ | |
61 | + 8*5 /* CRF */]; | |
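/* cpu_reg_names holds the NUL-terminated name strings handed to TCG for the
 * globals registered in ppc_translate_init(); TCG only keeps the pointer, so
 * the buffer must stay live.  The size expression above adds up the string
 * lengths: "r0".."r9" need 3 bytes each, "r10".."r31" need 4, "fp0".."fp9" 4,
 * "fp10".."fp31" 5, "avr0H".."avr9H" 6, "avr10H".."avr31H" 7 and
 * "crf0".."crf7" 5. */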
62 | static TCGv cpu_gpr[32]; | |
63 | #if !defined(TARGET_PPC64) | |
64 | static TCGv cpu_gprh[32]; | |
65 | #endif | |
66 | static TCGv_i64 cpu_fpr[32]; | |
67 | static TCGv_i64 cpu_avrh[32], cpu_avrl[32]; | |
68 | static TCGv_i32 cpu_crf[8]; | |
69 | static TCGv cpu_nip; | |
70 | static TCGv cpu_msr; | |
71 | static TCGv cpu_ctr; | |
72 | static TCGv cpu_lr; | |
73 | static TCGv cpu_xer; | |
74 | static TCGv cpu_reserve; | |
75 | static TCGv_i32 cpu_fpscr; | |
76 | static TCGv_i32 cpu_access_type; | |
77 | ||
78 | #include "gen-icount.h" | |
79 | ||
80 | void ppc_translate_init(void) | |
81 | { | |
82 | int i; | |
83 | char* p; | |
84 | static int done_init = 0; | |
85 | ||
86 | if (done_init) | |
87 | return; | |
88 | ||
89 | cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env"); | |
90 | ||
91 | p = cpu_reg_names; | |
92 | ||
93 | for (i = 0; i < 8; i++) { | |
94 | sprintf(p, "crf%d", i); | |
95 | cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0, | |
96 | offsetof(CPUState, crf[i]), p); | |
97 | p += 5; | |
98 | } | |
99 | ||
100 | for (i = 0; i < 32; i++) { | |
101 | sprintf(p, "r%d", i); | |
102 | cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0, | |
103 | offsetof(CPUState, gpr[i]), p); | |
104 | p += (i < 10) ? 3 : 4; | |
105 | #if !defined(TARGET_PPC64) | |
106 | sprintf(p, "r%dH", i); | |
107 | cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0, | |
108 | offsetof(CPUState, gprh[i]), p); | |
109 | p += (i < 10) ? 4 : 5; | |
110 | #endif | |
111 | ||
112 | sprintf(p, "fp%d", i); | |
113 | cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0, | |
114 | offsetof(CPUState, fpr[i]), p); | |
115 | p += (i < 10) ? 4 : 5; | |
116 | ||
117 | sprintf(p, "avr%dH", i); | |
118 | #ifdef WORDS_BIGENDIAN | |
119 | cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0, | |
120 | offsetof(CPUState, avr[i].u64[0]), p); | |
121 | #else | |
122 | cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0, | |
123 | offsetof(CPUState, avr[i].u64[1]), p); | |
124 | #endif | |
125 | p += (i < 10) ? 6 : 7; | |
126 | ||
127 | sprintf(p, "avr%dL", i); | |
128 | #ifdef WORDS_BIGENDIAN | |
129 | cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0, | |
130 | offsetof(CPUState, avr[i].u64[1]), p); | |
131 | #else | |
132 | cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0, | |
133 | offsetof(CPUState, avr[i].u64[0]), p); | |
134 | #endif | |
135 | p += (i < 10) ? 6 : 7; | |
136 | } | |
137 | ||
138 | cpu_nip = tcg_global_mem_new(TCG_AREG0, | |
139 | offsetof(CPUState, nip), "nip"); | |
140 | ||
141 | cpu_msr = tcg_global_mem_new(TCG_AREG0, | |
142 | offsetof(CPUState, msr), "msr"); | |
143 | ||
144 | cpu_ctr = tcg_global_mem_new(TCG_AREG0, | |
145 | offsetof(CPUState, ctr), "ctr"); | |
146 | ||
147 | cpu_lr = tcg_global_mem_new(TCG_AREG0, | |
148 | offsetof(CPUState, lr), "lr"); | |
149 | ||
150 | cpu_xer = tcg_global_mem_new(TCG_AREG0, | |
151 | offsetof(CPUState, xer), "xer"); | |
152 | ||
153 | cpu_reserve = tcg_global_mem_new(TCG_AREG0, | |
154 | offsetof(CPUState, reserve), "reserve"); | |
155 | ||
156 | cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0, | |
157 | offsetof(CPUState, fpscr), "fpscr"); | |
158 | ||
159 | cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0, | |
160 | offsetof(CPUState, access_type), "access_type"); | |
161 | ||
162 | /* register helpers */ | |
163 | #define GEN_HELPER 2 | |
164 | #include "helper.h" | |
165 | ||
166 | done_init = 1; | |
167 | } | |
168 | ||
169 | /* internal defines */ | |
170 | typedef struct DisasContext { | |
171 | struct TranslationBlock *tb; | |
172 | target_ulong nip; | |
173 | uint32_t opcode; | |
174 | uint32_t exception; | |
175 | /* Routine used to access memory */ | |
176 | int mem_idx; | |
177 | int access_type; | |
178 | /* Translation flags */ | |
179 | int le_mode; | |
180 | #if defined(TARGET_PPC64) | |
181 | int sf_mode; | |
182 | #endif | |
183 | int fpu_enabled; | |
184 | int altivec_enabled; | |
185 | int spe_enabled; | |
186 | ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ | |
187 | int singlestep_enabled; | |
188 | } DisasContext; | |
189 | ||
190 | struct opc_handler_t { | |
191 | /* invalid bits */ | |
192 | uint32_t inval; | |
193 | /* instruction type */ | |
194 | uint64_t type; | |
195 | /* handler */ | |
196 | void (*handler)(DisasContext *ctx); | |
197 | #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) | |
198 | const char *oname; | |
199 | #endif | |
200 | #if defined(DO_PPC_STATISTICS) | |
201 | uint64_t count; | |
202 | #endif | |
203 | }; | |
204 | ||
205 | static always_inline void gen_reset_fpstatus (void) | |
206 | { | |
207 | #ifdef CONFIG_SOFTFLOAT | |
208 | gen_helper_reset_fpstatus(); | |
209 | #endif | |
210 | } | |
211 | ||
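/* Compute the floating-point result flags for arg.  t0 doubles as the
 * set_fprf flag passed to the helper and as the returned 4-bit condition
 * code; the FP status is only re-checked when FPRF is actually updated, and
 * the code is copied into CR1 when Rc is set (the helper is still called in
 * the Rc-only case because fpcc must always be computed). */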
212 | static always_inline void gen_compute_fprf (TCGv_i64 arg, int set_fprf, int set_rc) | |
213 | { | |
214 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
215 | ||
216 | if (set_fprf != 0) { | |
217 | /* This case might be optimized later */ | |
218 | tcg_gen_movi_i32(t0, 1); | |
219 | gen_helper_compute_fprf(t0, arg, t0); | |
220 | if (unlikely(set_rc)) { | |
221 | tcg_gen_mov_i32(cpu_crf[1], t0); | |
222 | } | |
223 | gen_helper_float_check_status(); | |
224 | } else if (unlikely(set_rc)) { | |
225 | /* We always need to compute fpcc */ | |
226 | tcg_gen_movi_i32(t0, 0); | |
227 | gen_helper_compute_fprf(t0, arg, t0); | |
228 | tcg_gen_mov_i32(cpu_crf[1], t0); | |
229 | } | |
230 | ||
231 | tcg_temp_free_i32(t0); | |
232 | } | |
233 | ||
234 | static always_inline void gen_set_access_type (DisasContext *ctx, int access_type) | |
235 | { | |
236 | if (ctx->access_type != access_type) { | |
237 | tcg_gen_movi_i32(cpu_access_type, access_type); | |
238 | ctx->access_type = access_type; | |
239 | } | |
240 | } | |
241 | ||
242 | static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip) | |
243 | { | |
244 | #if defined(TARGET_PPC64) | |
245 | if (ctx->sf_mode) | |
246 | tcg_gen_movi_tl(cpu_nip, nip); | |
247 | else | |
248 | #endif | |
249 | tcg_gen_movi_tl(cpu_nip, (uint32_t)nip); | |
250 | } | |
251 | ||
252 | static always_inline void gen_exception_err (DisasContext *ctx, uint32_t excp, uint32_t error) | |
253 | { | |
254 | TCGv_i32 t0, t1; | |
255 | if (ctx->exception == POWERPC_EXCP_NONE) { | |
256 | gen_update_nip(ctx, ctx->nip); | |
257 | } | |
258 | t0 = tcg_const_i32(excp); | |
259 | t1 = tcg_const_i32(error); | |
260 | gen_helper_raise_exception_err(t0, t1); | |
261 | tcg_temp_free_i32(t0); | |
262 | tcg_temp_free_i32(t1); | |
263 | ctx->exception = (excp); | |
264 | } | |
265 | ||
266 | static always_inline void gen_exception (DisasContext *ctx, uint32_t excp) | |
267 | { | |
268 | TCGv_i32 t0; | |
269 | if (ctx->exception == POWERPC_EXCP_NONE) { | |
270 | gen_update_nip(ctx, ctx->nip); | |
271 | } | |
272 | t0 = tcg_const_i32(excp); | |
273 | gen_helper_raise_exception(t0); | |
274 | tcg_temp_free_i32(t0); | |
275 | ctx->exception = (excp); | |
276 | } | |
277 | ||
278 | static always_inline void gen_debug_exception (DisasContext *ctx) | |
279 | { | |
280 | TCGv_i32 t0; | |
281 | gen_update_nip(ctx, ctx->nip); | |
282 | t0 = tcg_const_i32(EXCP_DEBUG); | |
283 | gen_helper_raise_exception(t0); | |
284 | tcg_temp_free_i32(t0); | |
285 | } | |
286 | ||
287 | static always_inline void gen_inval_exception (DisasContext *ctx, uint32_t error) | |
288 | { | |
289 | gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error); | |
290 | } | |
291 | ||
292 | /* Stop translation */ | |
293 | static always_inline void gen_stop_exception (DisasContext *ctx) | |
294 | { | |
295 | gen_update_nip(ctx, ctx->nip); | |
296 | ctx->exception = POWERPC_EXCP_STOP; | |
297 | } | |
298 | ||
299 | /* No need to update nip here, as execution flow will change */ | |
300 | static always_inline void gen_sync_exception (DisasContext *ctx) | |
301 | { | |
302 | ctx->exception = POWERPC_EXCP_SYNC; | |
303 | } | |
304 | ||
305 | #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ | |
306 | static void gen_##name (DisasContext *ctx); \ | |
307 | GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \ | |
308 | static void gen_##name (DisasContext *ctx) | |
309 | ||
310 | #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ | |
311 | static void gen_##name (DisasContext *ctx); \ | |
312 | GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \ | |
313 | static void gen_##name (DisasContext *ctx) | |
314 | ||
315 | typedef struct opcode_t { | |
316 | unsigned char opc1, opc2, opc3; | |
317 | #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ | |
318 | unsigned char pad[5]; | |
319 | #else | |
320 | unsigned char pad[1]; | |
321 | #endif | |
322 | opc_handler_t handler; | |
323 | const char *oname; | |
324 | } opcode_t; | |
325 | ||
326 | /*****************************************************************************/ | |
327 | /*** Instruction decoding ***/ | |
328 | #define EXTRACT_HELPER(name, shift, nb) \ | |
329 | static always_inline uint32_t name (uint32_t opcode) \ | |
330 | { \ | |
331 | return (opcode >> (shift)) & ((1 << (nb)) - 1); \ | |
332 | } | |
333 | ||
334 | #define EXTRACT_SHELPER(name, shift, nb) \ | |
335 | static always_inline int32_t name (uint32_t opcode) \ | |
336 | { \ | |
337 | return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \ | |
338 | } | |
339 | ||
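/* For example, EXTRACT_HELPER(rD, 21, 5) defines rD(opcode) as
 * (opcode >> 21) & 0x1F; the EXTRACT_SHELPER variant additionally
 * sign-extends the extracted 16-bit field. */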
340 | /* Opcode part 1 */ | |
341 | EXTRACT_HELPER(opc1, 26, 6); | |
342 | /* Opcode part 2 */ | |
343 | EXTRACT_HELPER(opc2, 1, 5); | |
344 | /* Opcode part 3 */ | |
345 | EXTRACT_HELPER(opc3, 6, 5); | |
346 | /* Update CR0 flags */ | |
347 | EXTRACT_HELPER(Rc, 0, 1); | |
348 | /* Destination */ | |
349 | EXTRACT_HELPER(rD, 21, 5); | |
350 | /* Source */ | |
351 | EXTRACT_HELPER(rS, 21, 5); | |
352 | /* First operand */ | |
353 | EXTRACT_HELPER(rA, 16, 5); | |
354 | /* Second operand */ | |
355 | EXTRACT_HELPER(rB, 11, 5); | |
356 | /* Third operand */ | |
357 | EXTRACT_HELPER(rC, 6, 5); | |
358 | /*** Get CRn ***/ | |
359 | EXTRACT_HELPER(crfD, 23, 3); | |
360 | EXTRACT_HELPER(crfS, 18, 3); | |
361 | EXTRACT_HELPER(crbD, 21, 5); | |
362 | EXTRACT_HELPER(crbA, 16, 5); | |
363 | EXTRACT_HELPER(crbB, 11, 5); | |
364 | /* SPR / TBL */ | |
365 | EXTRACT_HELPER(_SPR, 11, 10); | |
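/* The SPR number is encoded in the instruction with its two 5-bit halves
 * swapped; SPR() swaps them back to get the architected SPR number. */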
366 | static always_inline uint32_t SPR (uint32_t opcode) | |
367 | { | |
368 | uint32_t sprn = _SPR(opcode); | |
369 | ||
370 | return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); | |
371 | } | |
372 | /*** Get constants ***/ | |
373 | EXTRACT_HELPER(IMM, 12, 8); | |
374 | /* 16-bit signed immediate value */ | |
375 | EXTRACT_SHELPER(SIMM, 0, 16); | |
376 | /* 16-bit unsigned immediate value */ | |
377 | EXTRACT_HELPER(UIMM, 0, 16); | |
378 | /* 5-bit signed immediate value */ | |
379 | EXTRACT_HELPER(SIMM5, 16, 5); | |
380 | /* 5-bit unsigned immediate value */ | |
381 | EXTRACT_HELPER(UIMM5, 16, 5); | |
382 | /* Bit count */ | |
383 | EXTRACT_HELPER(NB, 11, 5); | |
384 | /* Shift count */ | |
385 | EXTRACT_HELPER(SH, 11, 5); | |
386 | /* Vector shift count */ | |
387 | EXTRACT_HELPER(VSH, 6, 4); | |
388 | /* Mask start */ | |
389 | EXTRACT_HELPER(MB, 6, 5); | |
390 | /* Mask end */ | |
391 | EXTRACT_HELPER(ME, 1, 5); | |
392 | /* Trap operand */ | |
393 | EXTRACT_HELPER(TO, 21, 5); | |
394 | ||
395 | EXTRACT_HELPER(CRM, 12, 8); | |
396 | EXTRACT_HELPER(FM, 17, 8); | |
397 | EXTRACT_HELPER(SR, 16, 4); | |
398 | EXTRACT_HELPER(FPIMM, 12, 4); | |
399 | ||
400 | /*** Jump target decoding ***/ | |
401 | /* Displacement */ | |
402 | EXTRACT_SHELPER(d, 0, 16); | |
403 | /* Immediate address */ | |
404 | static always_inline target_ulong LI (uint32_t opcode) | |
405 | { | |
406 | return (opcode >> 0) & 0x03FFFFFC; | |
407 | } | |
408 | ||
409 | static always_inline uint32_t BD (uint32_t opcode) | |
410 | { | |
411 | return (opcode >> 0) & 0xFFFC; | |
412 | } | |
413 | ||
414 | EXTRACT_HELPER(BO, 21, 5); | |
415 | EXTRACT_HELPER(BI, 16, 5); | |
416 | /* Absolute/relative address */ | |
417 | EXTRACT_HELPER(AA, 1, 1); | |
418 | /* Link */ | |
419 | EXTRACT_HELPER(LK, 0, 1); | |
420 | ||
421 | /* Create a mask between <start> and <end> bits */ | |
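/* Bits are numbered in the PowerPC (big-endian) convention, bit 0 being the
 * most significant: e.g. MASK(0, 3) is 0xF0000000 in 32-bit mode.  When
 * start > end the mask wraps around, as required by the rotate-and-mask
 * instructions. */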
422 | static always_inline target_ulong MASK (uint32_t start, uint32_t end) | |
423 | { | |
424 | target_ulong ret; | |
425 | ||
426 | #if defined(TARGET_PPC64) | |
427 | if (likely(start == 0)) { | |
428 | ret = UINT64_MAX << (63 - end); | |
429 | } else if (likely(end == 63)) { | |
430 | ret = UINT64_MAX >> start; | |
431 | } | |
432 | #else | |
433 | if (likely(start == 0)) { | |
434 | ret = UINT32_MAX << (31 - end); | |
435 | } else if (likely(end == 31)) { | |
436 | ret = UINT32_MAX >> start; | |
437 | } | |
438 | #endif | |
439 | else { | |
440 | ret = (((target_ulong)(-1ULL)) >> (start)) ^ | |
441 | (((target_ulong)(-1ULL) >> (end)) >> 1); | |
442 | if (unlikely(start > end)) | |
443 | return ~ret; | |
444 | } | |
445 | ||
446 | return ret; | |
447 | } | |
448 | ||
449 | /*****************************************************************************/ | |
450 | /* PowerPC Instructions types definitions */ | |
451 | enum { | |
452 | PPC_NONE = 0x0000000000000000ULL, | |
453 | /* PowerPC base instructions set */ | |
454 | PPC_INSNS_BASE = 0x0000000000000001ULL, | |
455 | /* integer operations instructions */ | |
456 | #define PPC_INTEGER PPC_INSNS_BASE | |
457 | /* flow control instructions */ | |
458 | #define PPC_FLOW PPC_INSNS_BASE | |
459 | /* virtual memory instructions */ | |
460 | #define PPC_MEM PPC_INSNS_BASE | |
461 | /* ld/st with reservation instructions */ | |
462 | #define PPC_RES PPC_INSNS_BASE | |
463 | /* spr/msr access instructions */ | |
464 | #define PPC_MISC PPC_INSNS_BASE | |
465 | /* Deprecated instruction sets */ | |
466 | /* Original POWER instruction set */ | |
467 | PPC_POWER = 0x0000000000000002ULL, | |
468 | /* POWER2 instruction set extension */ | |
469 | PPC_POWER2 = 0x0000000000000004ULL, | |
470 | /* Power RTC support */ | |
471 | PPC_POWER_RTC = 0x0000000000000008ULL, | |
472 | /* Power-to-PowerPC bridge (601) */ | |
473 | PPC_POWER_BR = 0x0000000000000010ULL, | |
474 | /* 64-bit PowerPC instruction set */ | |
475 | PPC_64B = 0x0000000000000020ULL, | |
476 | /* New 64-bit extensions (PowerPC 2.0x) */ | |
477 | PPC_64BX = 0x0000000000000040ULL, | |
478 | /* 64-bit hypervisor extensions */ | |
479 | PPC_64H = 0x0000000000000080ULL, | |
480 | /* New wait instruction (PowerPC 2.0x) */ | |
481 | PPC_WAIT = 0x0000000000000100ULL, | |
482 | /* Time base mftb instruction */ | |
483 | PPC_MFTB = 0x0000000000000200ULL, | |
484 | ||
485 | /* Fixed-point unit extensions */ | |
486 | /* PowerPC 602 specific */ | |
487 | PPC_602_SPEC = 0x0000000000000400ULL, | |
488 | /* isel instruction */ | |
489 | PPC_ISEL = 0x0000000000000800ULL, | |
490 | /* popcntb instruction */ | |
491 | PPC_POPCNTB = 0x0000000000001000ULL, | |
492 | /* string load / store */ | |
493 | PPC_STRING = 0x0000000000002000ULL, | |
494 | ||
495 | /* Floating-point unit extensions */ | |
496 | /* Optional floating point instructions */ | |
497 | PPC_FLOAT = 0x0000000000010000ULL, | |
498 | /* New floating-point extensions (PowerPC 2.0x) */ | |
499 | PPC_FLOAT_EXT = 0x0000000000020000ULL, | |
500 | PPC_FLOAT_FSQRT = 0x0000000000040000ULL, | |
501 | PPC_FLOAT_FRES = 0x0000000000080000ULL, | |
502 | PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL, | |
503 | PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL, | |
504 | PPC_FLOAT_FSEL = 0x0000000000400000ULL, | |
505 | PPC_FLOAT_STFIWX = 0x0000000000800000ULL, | |
506 | ||
507 | /* Vector/SIMD extensions */ | |
508 | /* Altivec support */ | |
509 | PPC_ALTIVEC = 0x0000000001000000ULL, | |
510 | /* PowerPC 2.03 SPE extension */ | |
511 | PPC_SPE = 0x0000000002000000ULL, | |
512 | /* PowerPC 2.03 SPE floating-point extension */ | |
513 | PPC_SPEFPU = 0x0000000004000000ULL, | |
514 | ||
515 | /* Optional memory control instructions */ | |
516 | PPC_MEM_TLBIA = 0x0000000010000000ULL, | |
517 | PPC_MEM_TLBIE = 0x0000000020000000ULL, | |
518 | PPC_MEM_TLBSYNC = 0x0000000040000000ULL, | |
519 | /* sync instruction */ | |
520 | PPC_MEM_SYNC = 0x0000000080000000ULL, | |
521 | /* eieio instruction */ | |
522 | PPC_MEM_EIEIO = 0x0000000100000000ULL, | |
523 | ||
524 | /* Cache control instructions */ | |
525 | PPC_CACHE = 0x0000000200000000ULL, | |
526 | /* icbi instruction */ | |
527 | PPC_CACHE_ICBI = 0x0000000400000000ULL, | |
528 | /* dcbz instruction with fixed cache line size */ | |
529 | PPC_CACHE_DCBZ = 0x0000000800000000ULL, | |
530 | /* dcbz instruction with tunable cache line size */ | |
531 | PPC_CACHE_DCBZT = 0x0000001000000000ULL, | |
532 | /* dcba instruction */ | |
533 | PPC_CACHE_DCBA = 0x0000002000000000ULL, | |
534 | /* Freescale cache locking instructions */ | |
535 | PPC_CACHE_LOCK = 0x0000004000000000ULL, | |
536 | ||
537 | /* MMU related extensions */ | |
538 | /* external control instructions */ | |
539 | PPC_EXTERN = 0x0000010000000000ULL, | |
540 | /* segment register access instructions */ | |
541 | PPC_SEGMENT = 0x0000020000000000ULL, | |
542 | /* PowerPC 6xx TLB management instructions */ | |
543 | PPC_6xx_TLB = 0x0000040000000000ULL, | |
544 | /* PowerPC 74xx TLB management instructions */ | |
545 | PPC_74xx_TLB = 0x0000080000000000ULL, | |
546 | /* PowerPC 40x TLB management instructions */ | |
547 | PPC_40x_TLB = 0x0000100000000000ULL, | |
548 | /* segment register access instructions for PowerPC 64 "bridge" */ | |
549 | PPC_SEGMENT_64B = 0x0000200000000000ULL, | |
550 | /* SLB management */ | |
551 | PPC_SLBI = 0x0000400000000000ULL, | |
552 | ||
553 | /* Embedded PowerPC dedicated instructions */ | |
554 | PPC_WRTEE = 0x0001000000000000ULL, | |
555 | /* PowerPC 40x exception model */ | |
556 | PPC_40x_EXCP = 0x0002000000000000ULL, | |
557 | /* PowerPC 405 MAC (multiply-accumulate) instructions */ | |
558 | PPC_405_MAC = 0x0004000000000000ULL, | |
559 | /* PowerPC 440 specific instructions */ | |
560 | PPC_440_SPEC = 0x0008000000000000ULL, | |
561 | /* BookE (embedded) PowerPC specification */ | |
562 | PPC_BOOKE = 0x0010000000000000ULL, | |
563 | /* mfapidi instruction */ | |
564 | PPC_MFAPIDI = 0x0020000000000000ULL, | |
565 | /* tlbiva instruction */ | |
566 | PPC_TLBIVA = 0x0040000000000000ULL, | |
567 | /* tlbivax instruction */ | |
568 | PPC_TLBIVAX = 0x0080000000000000ULL, | |
569 | /* PowerPC 4xx dedicated instructions */ | |
570 | PPC_4xx_COMMON = 0x0100000000000000ULL, | |
571 | /* PowerPC 40x icbt instructions */ | |
572 | PPC_40x_ICBT = 0x0200000000000000ULL, | |
573 | /* rfmci is not implemented on all BookE PowerPC implementations */ | |
574 | PPC_RFMCI = 0x0400000000000000ULL, | |
575 | /* rfdi instruction */ | |
576 | PPC_RFDI = 0x0800000000000000ULL, | |
577 | /* DCR accesses */ | |
578 | PPC_DCR = 0x1000000000000000ULL, | |
579 | /* DCR extended accesses */ | |
580 | PPC_DCRX = 0x2000000000000000ULL, | |
581 | /* user-mode DCR access, implemented in PowerPC 460 */ | |
582 | PPC_DCRUX = 0x4000000000000000ULL, | |
583 | }; | |
584 | ||
585 | /*****************************************************************************/ | |
586 | /* PowerPC instructions table */ | |
587 | #if HOST_LONG_BITS == 64 | |
588 | #define OPC_ALIGN 8 | |
589 | #else | |
590 | #define OPC_ALIGN 4 | |
591 | #endif | |
592 | #if defined(__APPLE__) | |
593 | #define OPCODES_SECTION \ | |
594 | __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) )) | |
595 | #else | |
596 | #define OPCODES_SECTION \ | |
597 | __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) )) | |
598 | #endif | |
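/* Opcode descriptors are emitted into a dedicated, aligned section so that
 * they form a contiguous table which can be walked at CPU initialization
 * time, delimited by GEN_OPCODE_MARK entries; OPC_ALIGN keeps the element
 * stride identical on 32-bit and 64-bit hosts. */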
599 | ||
600 | #if defined(DO_PPC_STATISTICS) | |
601 | #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \ | |
602 | OPCODES_SECTION opcode_t opc_##name = { \ | |
603 | .opc1 = op1, \ | |
604 | .opc2 = op2, \ | |
605 | .opc3 = op3, \ | |
606 | .pad = { 0, }, \ | |
607 | .handler = { \ | |
608 | .inval = invl, \ | |
609 | .type = _typ, \ | |
610 | .handler = &gen_##name, \ | |
611 | .oname = stringify(name), \ | |
612 | }, \ | |
613 | .oname = stringify(name), \ | |
614 | } | |
615 | #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \ | |
616 | OPCODES_SECTION opcode_t opc_##name = { \ | |
617 | .opc1 = op1, \ | |
618 | .opc2 = op2, \ | |
619 | .opc3 = op3, \ | |
620 | .pad = { 0, }, \ | |
621 | .handler = { \ | |
622 | .inval = invl, \ | |
623 | .type = _typ, \ | |
624 | .handler = &gen_##name, \ | |
625 | .oname = onam, \ | |
626 | }, \ | |
627 | .oname = onam, \ | |
628 | } | |
629 | #else | |
630 | #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \ | |
631 | OPCODES_SECTION opcode_t opc_##name = { \ | |
632 | .opc1 = op1, \ | |
633 | .opc2 = op2, \ | |
634 | .opc3 = op3, \ | |
635 | .pad = { 0, }, \ | |
636 | .handler = { \ | |
637 | .inval = invl, \ | |
638 | .type = _typ, \ | |
639 | .handler = &gen_##name, \ | |
640 | }, \ | |
641 | .oname = stringify(name), \ | |
642 | } | |
643 | #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \ | |
644 | OPCODES_SECTION opcode_t opc_##name = { \ | |
645 | .opc1 = op1, \ | |
646 | .opc2 = op2, \ | |
647 | .opc3 = op3, \ | |
648 | .pad = { 0, }, \ | |
649 | .handler = { \ | |
650 | .inval = invl, \ | |
651 | .type = _typ, \ | |
652 | .handler = &gen_##name, \ | |
653 | }, \ | |
654 | .oname = onam, \ | |
655 | } | |
656 | #endif | |
657 | ||
658 | #define GEN_OPCODE_MARK(name) \ | |
659 | OPCODES_SECTION opcode_t opc_##name = { \ | |
660 | .opc1 = 0xFF, \ | |
661 | .opc2 = 0xFF, \ | |
662 | .opc3 = 0xFF, \ | |
663 | .pad = { 0, }, \ | |
664 | .handler = { \ | |
665 | .inval = 0x00000000, \ | |
666 | .type = 0x00, \ | |
667 | .handler = NULL, \ | |
668 | }, \ | |
669 | .oname = stringify(name), \ | |
670 | } | |
671 | ||
672 | /* SPR load/store helpers */ | |
673 | static always_inline void gen_load_spr(TCGv t, int reg) | |
674 | { | |
675 | tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg])); | |
676 | } | |
677 | ||
678 | static always_inline void gen_store_spr(int reg, TCGv t) | |
679 | { | |
680 | tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg])); | |
681 | } | |
682 | ||
683 | /* Start opcode list */ | |
684 | GEN_OPCODE_MARK(start); | |
685 | ||
686 | /* Invalid instruction */ | |
687 | GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE) | |
688 | { | |
689 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
690 | } | |
691 | ||
692 | static opc_handler_t invalid_handler = { | |
693 | .inval = 0xFFFFFFFF, | |
694 | .type = PPC_NONE, | |
695 | .handler = gen_invalid, | |
696 | }; | |
697 | ||
698 | /*** Integer comparison ***/ | |
699 | ||
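/* gen_op_cmp sets CR field crf from a signed (s != 0) or unsigned comparison
 * of arg0 with arg1: the SO bit is copied from XER[SO], then exactly one of
 * LT, GT or EQ is set. */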
700 | static always_inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) | |
701 | { | |
702 | int l1, l2, l3; | |
703 | ||
704 | tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer); | |
705 | tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO); | |
706 | tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1); | |
707 | ||
708 | l1 = gen_new_label(); | |
709 | l2 = gen_new_label(); | |
710 | l3 = gen_new_label(); | |
711 | if (s) { | |
712 | tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1); | |
713 | tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2); | |
714 | } else { | |
715 | tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1); | |
716 | tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2); | |
717 | } | |
718 | tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ); | |
719 | tcg_gen_br(l3); | |
720 | gen_set_label(l1); | |
721 | tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT); | |
722 | tcg_gen_br(l3); | |
723 | gen_set_label(l2); | |
724 | tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT); | |
725 | gen_set_label(l3); | |
726 | } | |
727 | ||
728 | static always_inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) | |
729 | { | |
730 | TCGv t0 = tcg_const_local_tl(arg1); | |
731 | gen_op_cmp(arg0, t0, s, crf); | |
732 | tcg_temp_free(t0); | |
733 | } | |
734 | ||
735 | #if defined(TARGET_PPC64) | |
736 | static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) | |
737 | { | |
738 | TCGv t0, t1; | |
739 | t0 = tcg_temp_local_new(); | |
740 | t1 = tcg_temp_local_new(); | |
741 | if (s) { | |
742 | tcg_gen_ext32s_tl(t0, arg0); | |
743 | tcg_gen_ext32s_tl(t1, arg1); | |
744 | } else { | |
745 | tcg_gen_ext32u_tl(t0, arg0); | |
746 | tcg_gen_ext32u_tl(t1, arg1); | |
747 | } | |
748 | gen_op_cmp(t0, t1, s, crf); | |
749 | tcg_temp_free(t1); | |
750 | tcg_temp_free(t0); | |
751 | } | |
752 | ||
753 | static always_inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) | |
754 | { | |
755 | TCGv t0 = tcg_const_local_tl(arg1); | |
756 | gen_op_cmp32(arg0, t0, s, crf); | |
757 | tcg_temp_free(t0); | |
758 | } | |
759 | #endif | |
760 | ||
761 | static always_inline void gen_set_Rc0 (DisasContext *ctx, TCGv reg) | |
762 | { | |
763 | #if defined(TARGET_PPC64) | |
764 | if (!(ctx->sf_mode)) | |
765 | gen_op_cmpi32(reg, 0, 1, 0); | |
766 | else | |
767 | #endif | |
768 | gen_op_cmpi(reg, 0, 1, 0); | |
769 | } | |
770 | ||
771 | /* cmp */ | |
772 | GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER) | |
773 | { | |
774 | #if defined(TARGET_PPC64) | |
775 | if (!(ctx->sf_mode && (ctx->opcode & 0x00200000))) | |
776 | gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], | |
777 | 1, crfD(ctx->opcode)); | |
778 | else | |
779 | #endif | |
780 | gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], | |
781 | 1, crfD(ctx->opcode)); | |
782 | } | |
783 | ||
784 | /* cmpi */ | |
785 | GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER) | |
786 | { | |
787 | #if defined(TARGET_PPC64) | |
788 | if (!(ctx->sf_mode && (ctx->opcode & 0x00200000))) | |
789 | gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), | |
790 | 1, crfD(ctx->opcode)); | |
791 | else | |
792 | #endif | |
793 | gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), | |
794 | 1, crfD(ctx->opcode)); | |
795 | } | |
796 | ||
797 | /* cmpl */ | |
798 | GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER) | |
799 | { | |
800 | #if defined(TARGET_PPC64) | |
801 | if (!(ctx->sf_mode && (ctx->opcode & 0x00200000))) | |
802 | gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], | |
803 | 0, crfD(ctx->opcode)); | |
804 | else | |
805 | #endif | |
806 | gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], | |
807 | 0, crfD(ctx->opcode)); | |
808 | } | |
809 | ||
810 | /* cmpli */ | |
811 | GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER) | |
812 | { | |
813 | #if defined(TARGET_PPC64) | |
814 | if (!(ctx->sf_mode && (ctx->opcode & 0x00200000))) | |
815 | gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), | |
816 | 0, crfD(ctx->opcode)); | |
817 | else | |
818 | #endif | |
819 | gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), | |
820 | 0, crfD(ctx->opcode)); | |
821 | } | |
822 | ||
823 | /* isel (PowerPC 2.03 specification) */ | |
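/* isel: rD = rA (or 0 when rA is r0) if CR bit BC is set, otherwise rB. */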
824 | GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL) | |
825 | { | |
826 | int l1, l2; | |
827 | uint32_t bi = rC(ctx->opcode); | |
828 | uint32_t mask; | |
829 | TCGv_i32 t0; | |
830 | ||
831 | l1 = gen_new_label(); | |
832 | l2 = gen_new_label(); | |
833 | ||
834 | mask = 1 << (3 - (bi & 0x03)); | |
835 | t0 = tcg_temp_new_i32(); | |
836 | tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask); | |
837 | tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1); | |
838 | if (rA(ctx->opcode) == 0) | |
839 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); | |
840 | else | |
841 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
842 | tcg_gen_br(l2); | |
843 | gen_set_label(l1); | |
844 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
845 | gen_set_label(l2); | |
846 | tcg_temp_free_i32(t0); | |
847 | } | |
848 | ||
849 | /*** Integer arithmetic ***/ | |
850 | ||
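/* Signed overflow detection for arg0, the result of an add or subtract of
 * arg1 and arg2: for an addition, overflow occurs iff both operands have the
 * same sign and the result's sign differs; for a subtraction (sub != 0,
 * arg0 = arg2 - arg1), iff the operands have opposite signs and the result
 * has the sign of arg1.  XER[OV] and XER[SO] are set accordingly. */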
851 | static always_inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, TCGv arg1, TCGv arg2, int sub) | |
852 | { | |
853 | int l1; | |
854 | TCGv t0; | |
855 | ||
856 | l1 = gen_new_label(); | |
857 | /* Start with XER OV disabled, the most likely case */ | |
858 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
859 | t0 = tcg_temp_local_new(); | |
860 | tcg_gen_xor_tl(t0, arg0, arg1); | |
861 | #if defined(TARGET_PPC64) | |
862 | if (!ctx->sf_mode) | |
863 | tcg_gen_ext32s_tl(t0, t0); | |
864 | #endif | |
865 | if (sub) | |
866 | tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1); | |
867 | else | |
868 | tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1); | |
869 | tcg_gen_xor_tl(t0, arg1, arg2); | |
870 | #if defined(TARGET_PPC64) | |
871 | if (!ctx->sf_mode) | |
872 | tcg_gen_ext32s_tl(t0, t0); | |
873 | #endif | |
874 | if (sub) | |
875 | tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1); | |
876 | else | |
877 | tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1); | |
878 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
879 | gen_set_label(l1); | |
880 | tcg_temp_free(t0); | |
881 | } | |
882 | ||
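/* Carry detection: for an addition the caller passes the result in arg1 and
 * one operand in arg2, and a carry out occurred iff result <u operand.  For
 * a subtraction (sub != 0), arg1 is b - a and arg2 is b, and XER[CA]
 * (meaning "no borrow") is set iff the result is <=u b. */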
883 | static always_inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1, TCGv arg2, int sub) | |
884 | { | |
885 | int l1 = gen_new_label(); | |
886 | ||
887 | #if defined(TARGET_PPC64) | |
888 | if (!(ctx->sf_mode)) { | |
889 | TCGv t0, t1; | |
890 | t0 = tcg_temp_new(); | |
891 | t1 = tcg_temp_new(); | |
892 | ||
893 | tcg_gen_ext32u_tl(t0, arg1); | |
894 | tcg_gen_ext32u_tl(t1, arg2); | |
895 | if (sub) { | |
896 | tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1); | |
897 | } else { | |
898 | tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); | |
899 | } | |
900 | tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA); | |
901 | gen_set_label(l1); | |
902 | tcg_temp_free(t0); | |
903 | tcg_temp_free(t1); | |
904 | } else | |
905 | #endif | |
906 | { | |
907 | if (sub) { | |
908 | tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1); | |
909 | } else { | |
910 | tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1); | |
911 | } | |
912 | tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA); | |
913 | gen_set_label(l1); | |
914 | } | |
915 | } | |
916 | ||
917 | /* Common add function */ | |
918 | static always_inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2, | |
919 | int add_ca, int compute_ca, int compute_ov) | |
920 | { | |
921 | TCGv t0, t1; | |
922 | ||
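    /* Write to ret directly unless it aliases an input register while CA/OV
     * still have to be computed from the original operand values; in that
     * case a local temporary is used and copied back at the end. */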
923 | if ((!compute_ca && !compute_ov) || | |
924 | (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) { | |
925 | t0 = ret; | |
926 | } else { | |
927 | t0 = tcg_temp_local_new(); | |
928 | } | |
929 | ||
930 | if (add_ca) { | |
931 | t1 = tcg_temp_local_new(); | |
932 | tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA)); | |
933 | tcg_gen_shri_tl(t1, t1, XER_CA); | |
934 | } | |
935 | ||
936 | if (compute_ca && compute_ov) { | |
937 | /* Start with XER CA and OV disabled, the most likely case */ | |
938 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV))); | |
939 | } else if (compute_ca) { | |
940 | /* Start with XER CA disabled, the most likely case */ | |
941 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
942 | } else if (compute_ov) { | |
943 | /* Start with XER OV disabled, the most likely case */ | |
944 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
945 | } | |
946 | ||
947 | tcg_gen_add_tl(t0, arg1, arg2); | |
948 | ||
949 | if (compute_ca) { | |
950 | gen_op_arith_compute_ca(ctx, t0, arg1, 0); | |
951 | } | |
952 | if (add_ca) { | |
953 | tcg_gen_add_tl(t0, t0, t1); | |
954 | gen_op_arith_compute_ca(ctx, t0, t1, 0); | |
955 | tcg_temp_free(t1); | |
956 | } | |
957 | if (compute_ov) { | |
958 | gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); | |
959 | } | |
960 | ||
961 | if (unlikely(Rc(ctx->opcode) != 0)) | |
962 | gen_set_Rc0(ctx, t0); | |
963 | ||
964 | if (!TCGV_EQUAL(t0, ret)) { | |
965 | tcg_gen_mov_tl(ret, t0); | |
966 | tcg_temp_free(t0); | |
967 | } | |
968 | } | |
969 | /* Add functions with two operands */ | |
970 | #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ | |
971 | GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER) \ | |
972 | { \ | |
973 | gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ | |
974 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ | |
975 | add_ca, compute_ca, compute_ov); \ | |
976 | } | |
977 | /* Add functions with one operand and one immediate */ | |
978 | #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ | |
979 | add_ca, compute_ca, compute_ov) \ | |
980 | GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER) \ | |
981 | { \ | |
982 | TCGv t0 = tcg_const_local_tl(const_val); \ | |
983 | gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ | |
984 | cpu_gpr[rA(ctx->opcode)], t0, \ | |
985 | add_ca, compute_ca, compute_ov); \ | |
986 | tcg_temp_free(t0); \ | |
987 | } | |
988 | ||
989 | /* add add. addo addo. */ | |
990 | GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) | |
991 | GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) | |
992 | /* addc addc. addco addco. */ | |
993 | GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) | |
994 | GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) | |
995 | /* adde adde. addeo addeo. */ | |
996 | GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) | |
997 | GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) | |
998 | /* addme addme. addmeo addmeo. */ | |
999 | GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) | |
1000 | GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) | |
1001 | /* addze addze. addzeo addzeo.*/ | |
1002 | GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) | |
1003 | GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) | |
1004 | /* addi */ | |
1005 | GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1006 | { | |
1007 | target_long simm = SIMM(ctx->opcode); | |
1008 | ||
1009 | if (rA(ctx->opcode) == 0) { | |
1010 | /* li case */ | |
1011 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); | |
1012 | } else { | |
1013 | tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm); | |
1014 | } | |
1015 | } | |
1016 | /* addic addic.*/ | |
1017 | static always_inline void gen_op_addic (DisasContext *ctx, TCGv ret, TCGv arg1, | |
1018 | int compute_Rc0) | |
1019 | { | |
1020 | target_long simm = SIMM(ctx->opcode); | |
1021 | ||
1022 | /* Start with XER CA disabled, the most likely case */ | |
1023 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
1024 | ||
1025 | if (likely(simm != 0)) { | |
1026 | TCGv t0 = tcg_temp_local_new(); | |
1027 | tcg_gen_addi_tl(t0, arg1, simm); | |
1028 | gen_op_arith_compute_ca(ctx, t0, arg1, 0); | |
1029 | tcg_gen_mov_tl(ret, t0); | |
1030 | tcg_temp_free(t0); | |
1031 | } else { | |
1032 | tcg_gen_mov_tl(ret, arg1); | |
1033 | } | |
1034 | if (compute_Rc0) { | |
1035 | gen_set_Rc0(ctx, ret); | |
1036 | } | |
1037 | } | |
1038 | GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1039 | { | |
1040 | gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0); | |
1041 | } | |
1042 | GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1043 | { | |
1044 | gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1); | |
1045 | } | |
1046 | /* addis */ | |
1047 | GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1048 | { | |
1049 | target_long simm = SIMM(ctx->opcode); | |
1050 | ||
1051 | if (rA(ctx->opcode) == 0) { | |
1052 | /* lis case */ | |
1053 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); | |
1054 | } else { | |
1055 | tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16); | |
1056 | } | |
1057 | } | |
1058 | ||
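/* 32-bit divide: division by zero and the INT32_MIN / -1 overflow case
 * branch to l1, where the quotient is forced to 0 (unsigned) or to the
 * replicated sign bit of the dividend (signed) and XER[OV]/XER[SO] are set
 * when requested. */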
1059 | static always_inline void gen_op_arith_divw (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2, | |
1060 | int sign, int compute_ov) | |
1061 | { | |
1062 | int l1 = gen_new_label(); | |
1063 | int l2 = gen_new_label(); | |
1064 | TCGv_i32 t0 = tcg_temp_local_new_i32(); | |
1065 | TCGv_i32 t1 = tcg_temp_local_new_i32(); | |
1066 | ||
1067 | tcg_gen_trunc_tl_i32(t0, arg1); | |
1068 | tcg_gen_trunc_tl_i32(t1, arg2); | |
1069 | tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1); | |
1070 | if (sign) { | |
1071 | int l3 = gen_new_label(); | |
1072 | tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3); | |
1073 | tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1); | |
1074 | gen_set_label(l3); | |
1075 | tcg_gen_div_i32(t0, t0, t1); | |
1076 | } else { | |
1077 | tcg_gen_divu_i32(t0, t0, t1); | |
1078 | } | |
1079 | if (compute_ov) { | |
1080 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
1081 | } | |
1082 | tcg_gen_br(l2); | |
1083 | gen_set_label(l1); | |
1084 | if (sign) { | |
1085 | tcg_gen_sari_i32(t0, t0, 31); | |
1086 | } else { | |
1087 | tcg_gen_movi_i32(t0, 0); | |
1088 | } | |
1089 | if (compute_ov) { | |
1090 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
1091 | } | |
1092 | gen_set_label(l2); | |
1093 | tcg_gen_extu_i32_tl(ret, t0); | |
1094 | tcg_temp_free_i32(t0); | |
1095 | tcg_temp_free_i32(t1); | |
1096 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1097 | gen_set_Rc0(ctx, ret); | |
1098 | } | |
1099 | /* Div functions */ | |
1100 | #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ | |
1101 | GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) \ | |
1102 | { \ | |
1103 | gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ | |
1104 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ | |
1105 | sign, compute_ov); \ | |
1106 | } | |
1107 | /* divwu divwu. divwuo divwuo. */ | |
1108 | GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); | |
1109 | GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); | |
1110 | /* divw divw. divwo divwo. */ | |
1111 | GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); | |
1112 | GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); | |
1113 | #if defined(TARGET_PPC64) | |
1114 | static always_inline void gen_op_arith_divd (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2, | |
1115 | int sign, int compute_ov) | |
1116 | { | |
1117 | int l1 = gen_new_label(); | |
1118 | int l2 = gen_new_label(); | |
1119 | ||
1120 | tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1); | |
1121 | if (sign) { | |
1122 | int l3 = gen_new_label(); | |
1123 | tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3); | |
1124 | tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1); | |
1125 | gen_set_label(l3); | |
1126 | tcg_gen_div_i64(ret, arg1, arg2); | |
1127 | } else { | |
1128 | tcg_gen_divu_i64(ret, arg1, arg2); | |
1129 | } | |
1130 | if (compute_ov) { | |
1131 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
1132 | } | |
1133 | tcg_gen_br(l2); | |
1134 | gen_set_label(l1); | |
1135 | if (sign) { | |
1136 | tcg_gen_sari_i64(ret, arg1, 63); | |
1137 | } else { | |
1138 | tcg_gen_movi_i64(ret, 0); | |
1139 | } | |
1140 | if (compute_ov) { | |
1141 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
1142 | } | |
1143 | gen_set_label(l2); | |
1144 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1145 | gen_set_Rc0(ctx, ret); | |
1146 | } | |
1147 | #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ | |
1148 | GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \ | |
1149 | { \ | |
1150 | gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ | |
1151 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ | |
1152 | sign, compute_ov); \ | |
1153 | } | |
1154 | /* divdu divdu. divduo divduo. */ | |
1155 | GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); | |
1156 | GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); | |
1157 | /* divd divd. divdo divdo. */ | |
1158 | GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); | |
1159 | GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); | |
1160 | #endif | |
1161 | ||
1162 | /* mulhw mulhw. */ | |
1163 | GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER) | |
1164 | { | |
1165 | TCGv_i64 t0, t1; | |
1166 | ||
1167 | t0 = tcg_temp_new_i64(); | |
1168 | t1 = tcg_temp_new_i64(); | |
1169 | #if defined(TARGET_PPC64) | |
1170 | tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); | |
1171 | tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); | |
1172 | tcg_gen_mul_i64(t0, t0, t1); | |
1173 | tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
1174 | #else | |
1175 | tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
1176 | tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
1177 | tcg_gen_mul_i64(t0, t0, t1); | |
1178 | tcg_gen_shri_i64(t0, t0, 32); | |
1179 | tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
1180 | #endif | |
1181 | tcg_temp_free_i64(t0); | |
1182 | tcg_temp_free_i64(t1); | |
1183 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1184 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
1185 | } | |
1186 | /* mulhwu mulhwu. */ | |
1187 | GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER) | |
1188 | { | |
1189 | TCGv_i64 t0, t1; | |
1190 | ||
1191 | t0 = tcg_temp_new_i64(); | |
1192 | t1 = tcg_temp_new_i64(); | |
1193 | #if defined(TARGET_PPC64) | |
1194 | tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
1195 | tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
1196 | tcg_gen_mul_i64(t0, t0, t1); | |
1197 | tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
1198 | #else | |
1199 | tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
1200 | tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
1201 | tcg_gen_mul_i64(t0, t0, t1); | |
1202 | tcg_gen_shri_i64(t0, t0, 32); | |
1203 | tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
1204 | #endif | |
1205 | tcg_temp_free_i64(t0); | |
1206 | tcg_temp_free_i64(t1); | |
1207 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1208 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
1209 | } | |
1210 | /* mullw mullw. */ | |
1211 | GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER) | |
1212 | { | |
1213 | tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], | |
1214 | cpu_gpr[rB(ctx->opcode)]); | |
1215 | tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]); | |
1216 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1217 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
1218 | } | |
1219 | /* mullwo mullwo. */ | |
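/* The multiply is done in 64 bits; overflow is flagged when the full product
 * differs from the sign-extension of its low 32 bits. */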
1220 | GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER) | |
1221 | { | |
1222 | int l1; | |
1223 | TCGv_i64 t0, t1; | |
1224 | ||
1225 | t0 = tcg_temp_new_i64(); | |
1226 | t1 = tcg_temp_new_i64(); | |
1227 | l1 = gen_new_label(); | |
1228 | /* Start with XER OV disabled, the most likely case */ | |
1229 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
1230 | #if defined(TARGET_PPC64) | |
1231 | tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
1232 | tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
1233 | #else | |
1234 | tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
1235 | tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
1236 | #endif | |
1237 | tcg_gen_mul_i64(t0, t0, t1); | |
1238 | #if defined(TARGET_PPC64) | |
1239 | tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0); | |
1240 | tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1); | |
1241 | #else | |
1242 | tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
1243 | tcg_gen_ext32s_i64(t1, t0); | |
1244 | tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); | |
1245 | #endif | |
1246 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
1247 | gen_set_label(l1); | |
1248 | tcg_temp_free_i64(t0); | |
1249 | tcg_temp_free_i64(t1); | |
1250 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1251 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
1252 | } | |
1253 | /* mulli */ | |
1254 | GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1255 | { | |
1256 | tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], | |
1257 | SIMM(ctx->opcode)); | |
1258 | } | |
1259 | #if defined(TARGET_PPC64) | |
1260 | #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ | |
1261 | GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \ | |
1262 | { \ | |
1263 | gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \ | |
1264 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \ | |
1265 | if (unlikely(Rc(ctx->opcode) != 0)) \ | |
1266 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ | |
1267 | } | |
1268 | /* mulhdu mulhdu. */ | |
1269 | GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00); | |
1270 | /* mulhd mulhd. */ | |
1271 | GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02); | |
1272 | /* mulld mulld. */ | |
1273 | GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B) | |
1274 | { | |
1275 | tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], | |
1276 | cpu_gpr[rB(ctx->opcode)]); | |
1277 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1278 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
1279 | } | |
1280 | /* mulldo mulldo. */ | |
1281 | GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17); | |
1282 | #endif | |
1283 | ||
1284 | /* neg neg. nego nego. */ | |
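/* Negating the most negative representable value overflows: that case keeps
 * the operand value unchanged and sets XER[OV]/XER[SO] when ov_check is set. */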
1285 | static always_inline void gen_op_arith_neg (DisasContext *ctx, TCGv ret, TCGv arg1, int ov_check) | |
1286 | { | |
1287 | int l1 = gen_new_label(); | |
1288 | int l2 = gen_new_label(); | |
1289 | TCGv t0 = tcg_temp_local_new(); | |
1290 | #if defined(TARGET_PPC64) | |
1291 | if (ctx->sf_mode) { | |
1292 | tcg_gen_mov_tl(t0, arg1); | |
1293 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1); | |
1294 | } else | |
1295 | #endif | |
1296 | { | |
1297 | tcg_gen_ext32s_tl(t0, arg1); | |
1298 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1); | |
1299 | } | |
1300 | tcg_gen_neg_tl(ret, arg1); | |
1301 | if (ov_check) { | |
1302 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
1303 | } | |
1304 | tcg_gen_br(l2); | |
1305 | gen_set_label(l1); | |
1306 | tcg_gen_mov_tl(ret, t0); | |
1307 | if (ov_check) { | |
1308 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
1309 | } | |
1310 | gen_set_label(l2); | |
1311 | tcg_temp_free(t0); | |
1312 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1313 | gen_set_Rc0(ctx, ret); | |
1314 | } | |
1315 | GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER) | |
1316 | { | |
1317 | gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0); | |
1318 | } | |
1319 | GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER) | |
1320 | { | |
1321 | gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1); | |
1322 | } | |
1323 | ||
1324 | /* Common subf function */ | |
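/* subf computes arg2 - arg1.  The carry-in variants use the equivalent
 * ~arg1 + arg2 + CA formulation so that XER[CA] can be accumulated across
 * the two additions. */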
1325 | static always_inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2, | |
1326 | int add_ca, int compute_ca, int compute_ov) | |
1327 | { | |
1328 | TCGv t0, t1; | |
1329 | ||
1330 | if ((!compute_ca && !compute_ov) || | |
1331 | (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) { | |
1332 | t0 = ret; | |
1333 | } else { | |
1334 | t0 = tcg_temp_local_new(); | |
1335 | } | |
1336 | ||
1337 | if (add_ca) { | |
1338 | t1 = tcg_temp_local_new(); | |
1339 | tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA)); | |
1340 | tcg_gen_shri_tl(t1, t1, XER_CA); | |
1341 | } | |
1342 | ||
1343 | if (compute_ca && compute_ov) { | |
1344 | /* Start with XER CA and OV disabled, the most likely case */ | |
1345 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV))); | |
1346 | } else if (compute_ca) { | |
1347 | /* Start with XER CA disabled, the most likely case */ | |
1348 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
1349 | } else if (compute_ov) { | |
1350 | /* Start with XER OV disabled, the most likely case */ | |
1351 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
1352 | } | |
1353 | ||
1354 | if (add_ca) { | |
1355 | tcg_gen_not_tl(t0, arg1); | |
1356 | tcg_gen_add_tl(t0, t0, arg2); | |
1357 | gen_op_arith_compute_ca(ctx, t0, arg2, 0); | |
1358 | tcg_gen_add_tl(t0, t0, t1); | |
1359 | gen_op_arith_compute_ca(ctx, t0, t1, 0); | |
1360 | tcg_temp_free(t1); | |
1361 | } else { | |
1362 | tcg_gen_sub_tl(t0, arg2, arg1); | |
1363 | if (compute_ca) { | |
1364 | gen_op_arith_compute_ca(ctx, t0, arg2, 1); | |
1365 | } | |
1366 | } | |
1367 | if (compute_ov) { | |
1368 | gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); | |
1369 | } | |
1370 | ||
1371 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1372 | gen_set_Rc0(ctx, t0); | |
1373 | ||
1374 | if (!TCGV_EQUAL(t0, ret)) { | |
1375 | tcg_gen_mov_tl(ret, t0); | |
1376 | tcg_temp_free(t0); | |
1377 | } | |
1378 | } | |
1379 | /* Sub functions with two operands */ | |
1380 | #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ | |
1381 | GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER) \ | |
1382 | { \ | |
1383 | gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ | |
1384 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ | |
1385 | add_ca, compute_ca, compute_ov); \ | |
1386 | } | |
1387 | /* Sub functions with one operand and one immediate */ | |
1388 | #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ | |
1389 | add_ca, compute_ca, compute_ov) \ | |
1390 | GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER) \ | |
1391 | { \ | |
1392 | TCGv t0 = tcg_const_local_tl(const_val); \ | |
1393 | gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ | |
1394 | cpu_gpr[rA(ctx->opcode)], t0, \ | |
1395 | add_ca, compute_ca, compute_ov); \ | |
1396 | tcg_temp_free(t0); \ | |
1397 | } | |
1398 | /* subf subf. subfo subfo. */ | |
1399 | GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) | |
1400 | GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) | |
1401 | /* subfc subfc. subfco subfco. */ | |
1402 | GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) | |
1403 | GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) | |
1404 | /* subfe subfe. subfeo subfeo. */ | |
1405 | GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) | |
1406 | GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) | |
1407 | /* subfme subfme. subfmeo subfmeo. */ | |
1408 | GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) | |
1409 | GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) | |
1410 | /* subfze subfze. subfzeo subfzeo.*/ | |
1411 | GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) | |
1412 | GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) | |
1413 | /* subfic */ | |
1414 | GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1415 | { | |
1416 | /* Start with XER CA disabled, the most likely case */ | |
1417 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
1418 | TCGv t0 = tcg_temp_local_new(); | |
1419 | TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode)); | |
1420 | tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]); | |
1421 | gen_op_arith_compute_ca(ctx, t0, t1, 1); | |
1422 | tcg_temp_free(t1); | |
1423 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
1424 | tcg_temp_free(t0); | |
1425 | } | |
1426 | ||
1427 | /*** Integer logical ***/ | |
1428 | #define GEN_LOGICAL2(name, tcg_op, opc, type) \ | |
1429 | GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) \ | |
1430 | { \ | |
1431 | tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ | |
1432 | cpu_gpr[rB(ctx->opcode)]); \ | |
1433 | if (unlikely(Rc(ctx->opcode) != 0)) \ | |
1434 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ | |
1435 | } | |
1436 | ||
1437 | #define GEN_LOGICAL1(name, tcg_op, opc, type) \ | |
1438 | GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \ | |
1439 | { \ | |
1440 | tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ | |
1441 | if (unlikely(Rc(ctx->opcode) != 0)) \ | |
1442 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ | |
1443 | } | |
1444 | ||
1445 | /* and & and. */ | |
1446 | GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); | |
1447 | /* andc & andc. */ | |
1448 | GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); | |
1449 | /* andi. */ | |
1450 | GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1451 | { | |
1452 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); | |
1453 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1454 | } | |
1455 | /* andis. */ | |
1456 | GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1457 | { | |
1458 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); | |
1459 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1460 | } | |
1461 | /* cntlzw */ | |
1462 | GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER) | |
1463 | { | |
1464 | gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1465 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1466 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1467 | } | |
1468 | /* eqv & eqv. */ | |
1469 | GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); | |
1470 | /* extsb & extsb. */ | |
1471 | GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); | |
1472 | /* extsh & extsh. */ | |
1473 | GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); | |
1474 | /* nand & nand. */ | |
1475 | GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); | |
1476 | /* nor & nor. */ | |
1477 | GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); | |
1478 | /* or & or. */ | |
1479 | GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER) | |
1480 | { | |
1481 | int rs, ra, rb; | |
1482 | ||
1483 | rs = rS(ctx->opcode); | |
1484 | ra = rA(ctx->opcode); | |
1485 | rb = rB(ctx->opcode); | |
1486 | /* Optimisation for mr. ri case */ | |
1487 | if (rs != ra || rs != rb) { | |
1488 | if (rs != rb) | |
1489 | tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); | |
1490 | else | |
1491 | tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); | |
1492 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1493 | gen_set_Rc0(ctx, cpu_gpr[ra]); | |
1494 | } else if (unlikely(Rc(ctx->opcode) != 0)) { | |
1495 | gen_set_Rc0(ctx, cpu_gpr[rs]); | |
1496 | #if defined(TARGET_PPC64) | |
1497 | } else { | |
1498 | int prio = 0; | |
1499 | ||
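        /* "or rx,rx,rx" with Rc=0 is architecturally a no-op used as a
         * thread priority hint on 64-bit implementations: map the register
         * number to a priority value and store it into SPR PPR. */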
1500 | switch (rs) { | |
1501 | case 1: | |
1502 | /* Set process priority to low */ | |
1503 | prio = 2; | |
1504 | break; | |
1505 | case 6: | |
1506 | /* Set process priority to medium-low */ | |
1507 | prio = 3; | |
1508 | break; | |
1509 | case 2: | |
1510 | /* Set process priority to normal */ | |
1511 | prio = 4; | |
1512 | break; | |
1513 | #if !defined(CONFIG_USER_ONLY) | |
1514 | case 31: | |
1515 | if (ctx->mem_idx > 0) { | |
1516 | /* Set process priority to very low */ | |
1517 | prio = 1; | |
1518 | } | |
1519 | break; | |
1520 | case 5: | |
1521 | if (ctx->mem_idx > 0) { | |
1522 | /* Set process priority to medium-high */ | |
1523 | prio = 5; | |
1524 | } | |
1525 | break; | |
1526 | case 3: | |
1527 | if (ctx->mem_idx > 0) { | |
1528 | /* Set process priority to high */ | |
1529 | prio = 6; | |
1530 | } | |
1531 | break; | |
1532 | case 7: | |
1533 | if (ctx->mem_idx > 1) { | |
1534 | /* Set process priority to very high */ | |
1535 | prio = 7; | |
1536 | } | |
1537 | break; | |
1538 | #endif | |
1539 | default: | |
1540 | /* nop */ | |
1541 | break; | |
1542 | } | |
1543 | if (prio) { | |
1544 | TCGv t0 = tcg_temp_new(); | |
1545 | gen_load_spr(t0, SPR_PPR); | |
1546 | tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); | |
1547 | tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); | |
1548 | gen_store_spr(SPR_PPR, t0); | |
1549 | tcg_temp_free(t0); | |
1550 | } | |
1551 | #endif | |
1552 | } | |
1553 | } | |
1554 | /* orc & orc. */ | |
1555 | GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); | |
1556 | /* xor & xor. */ | |
1557 | GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER) | |
1558 | { | |
1559 | /* Optimisation for "set to zero" case */ | |
1560 | if (rS(ctx->opcode) != rB(ctx->opcode)) | |
1561 | tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
1562 | else | |
1563 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
1564 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1565 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1566 | } | |
1567 | /* ori */ | |
1568 | GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1569 | { | |
1570 | target_ulong uimm = UIMM(ctx->opcode); | |
1571 | ||
1572 | if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { | |
1573 | /* NOP */ | |
1574 | /* XXX: should handle special NOPs for POWER series */ | |
1575 | return; | |
1576 | } | |
1577 | tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); | |
1578 | } | |
1579 | /* oris */ | |
1580 | GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1581 | { | |
1582 | target_ulong uimm = UIMM(ctx->opcode); | |
1583 | ||
1584 | if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { | |
1585 | /* NOP */ | |
1586 | return; | |
1587 | } | |
1588 | tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); | |
1589 | } | |
1590 | /* xori */ | |
1591 | GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1592 | { | |
1593 | target_ulong uimm = UIMM(ctx->opcode); | |
1594 | ||
1595 | if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { | |
1596 | /* NOP */ | |
1597 | return; | |
1598 | } | |
1599 | tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); | |
1600 | } | |
1601 | /* xoris */ | |
1602 | GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1603 | { | |
1604 | target_ulong uimm = UIMM(ctx->opcode); | |
1605 | ||
1606 | if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { | |
1607 | /* NOP */ | |
1608 | return; | |
1609 | } | |
1610 | tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); | |
1611 | } | |
1612 | /* popcntb : PowerPC 2.03 specification */ | |
1613 | GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB) | |
1614 | { | |
1615 | #if defined(TARGET_PPC64) | |
1616 | if (ctx->sf_mode) | |
1617 | gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1618 | else | |
1619 | #endif | |
1620 | gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1621 | } | |
1622 | ||
1623 | #if defined(TARGET_PPC64) | |
1624 | /* extsw & extsw. */ | |
1625 | GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); | |
1626 | /* cntlzd */ | |
1627 | GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B) | |
1628 | { | |
1629 | gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1630 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1631 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1632 | } | |
1633 | #endif | |
1634 | ||
1635 | /*** Integer rotate ***/ | |
1636 | /* rlwimi & rlwimi. */ | |
1637 | GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1638 | { | |
1639 | uint32_t mb, me, sh; | |
1640 | ||
1641 | mb = MB(ctx->opcode); | |
1642 | me = ME(ctx->opcode); | |
1643 | sh = SH(ctx->opcode); | |
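| /* rlwimi: rA = (ROTL32(rS, sh) & MASK(mb, me)) | (rA & ~MASK(mb, me)); | |
| * the sh == 0, mb == 0, me == 31 case is done as a 32-bit zero extension. */ | |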
1644 | if (likely(sh == 0 && mb == 0 && me == 31)) { | |
1645 | tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1646 | } else { | |
1647 | target_ulong mask; | |
1648 | TCGv t1; | |
1649 | TCGv t0 = tcg_temp_new(); | |
1650 | #if defined(TARGET_PPC64) | |
1651 | TCGv_i32 t2 = tcg_temp_new_i32(); | |
1652 | tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]); | |
1653 | tcg_gen_rotli_i32(t2, t2, sh); | |
1654 | tcg_gen_extu_i32_i64(t0, t2); | |
1655 | tcg_temp_free_i32(t2); | |
1656 | #else | |
1657 | tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
1658 | #endif | |
1659 | #if defined(TARGET_PPC64) | |
1660 | mb += 32; | |
1661 | me += 32; | |
1662 | #endif | |
1663 | mask = MASK(mb, me); | |
1664 | t1 = tcg_temp_new(); | |
1665 | tcg_gen_andi_tl(t0, t0, mask); | |
1666 | tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask); | |
1667 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
1668 | tcg_temp_free(t0); | |
1669 | tcg_temp_free(t1); | |
1670 | } | |
1671 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1672 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1673 | } | |
1674 | /* rlwinm & rlwinm. */ | |
1675 | GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1676 | { | |
1677 | uint32_t mb, me, sh; | |
1678 | ||
1679 | sh = SH(ctx->opcode); | |
1680 | mb = MB(ctx->opcode); | |
1681 | me = ME(ctx->opcode); | |
1682 | ||
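| /* rlwinm: rA = ROTL32(rS, sh) & MASK(mb, me). The two special cases below | |
| * recognise the slwi (mb == 0, me == 31 - sh) and srwi (me == 31, | |
| * sh == 32 - mb) idioms before falling back to rotate-and-mask. */ | |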
1683 | if (likely(mb == 0 && me == (31 - sh))) { | |
1684 | if (likely(sh == 0)) { | |
1685 | tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1686 | } else { | |
1687 | TCGv t0 = tcg_temp_new(); | |
1688 | tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]); | |
1689 | tcg_gen_shli_tl(t0, t0, sh); | |
1690 | tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
1691 | tcg_temp_free(t0); | |
1692 | } | |
1693 | } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) { | |
1694 | TCGv t0 = tcg_temp_new(); | |
1695 | tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]); | |
1696 | tcg_gen_shri_tl(t0, t0, mb); | |
1697 | tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
1698 | tcg_temp_free(t0); | |
1699 | } else { | |
1700 | TCGv t0 = tcg_temp_new(); | |
1701 | #if defined(TARGET_PPC64) | |
1702 | TCGv_i32 t1 = tcg_temp_new_i32(); | |
1703 | tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]); | |
1704 | tcg_gen_rotli_i32(t1, t1, sh); | |
1705 | tcg_gen_extu_i32_i64(t0, t1); | |
1706 | tcg_temp_free_i32(t1); | |
1707 | #else | |
1708 | tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
1709 | #endif | |
1710 | #if defined(TARGET_PPC64) | |
1711 | mb += 32; | |
1712 | me += 32; | |
1713 | #endif | |
1714 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me)); | |
1715 | tcg_temp_free(t0); | |
1716 | } | |
1717 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1718 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1719 | } | |
1720 | /* rlwnm & rlwnm. */ | |
1721 | GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
1722 | { | |
1723 | uint32_t mb, me; | |
1724 | TCGv t0; | |
1725 | #if defined(TARGET_PPC64) | |
1726 | TCGv_i32 t1, t2; | |
1727 | #endif | |
1728 | ||
1729 | mb = MB(ctx->opcode); | |
1730 | me = ME(ctx->opcode); | |
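| /* rlwnm: like rlwinm, but the rotate count is taken from rB[27:31]. */ | |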
1731 | t0 = tcg_temp_new(); | |
1732 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f); | |
1733 | #if defined(TARGET_PPC64) | |
1734 | t1 = tcg_temp_new_i32(); | |
1735 | t2 = tcg_temp_new_i32(); | |
1736 | tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]); | |
1737 | tcg_gen_trunc_i64_i32(t2, t0); | |
1738 | tcg_gen_rotl_i32(t1, t1, t2); | |
1739 | tcg_gen_extu_i32_i64(t0, t1); | |
1740 | tcg_temp_free_i32(t1); | |
1741 | tcg_temp_free_i32(t2); | |
1742 | #else | |
1743 | tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0); | |
1744 | #endif | |
1745 | if (unlikely(mb != 0 || me != 31)) { | |
1746 | #if defined(TARGET_PPC64) | |
1747 | mb += 32; | |
1748 | me += 32; | |
1749 | #endif | |
1750 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me)); | |
1751 | } else { | |
1752 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
1753 | } | |
1754 | tcg_temp_free(t0); | |
1755 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1756 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1757 | } | |
1758 | ||
1759 | #if defined(TARGET_PPC64) | |
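| /* The 64-bit rotate instructions split their 6-bit SH/MB/ME fields across | |
| * the opcode word, so each one is registered once per value of the extra | |
| * bit(s): GEN_PPC64_R2 for one split bit, GEN_PPC64_R4 for two. */ | |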
1760 | #define GEN_PPC64_R2(name, opc1, opc2) \ | |
1761 | GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \ | |
1762 | { \ | |
1763 | gen_##name(ctx, 0); \ | |
1764 | } \ | |
1765 | GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ | |
1766 | PPC_64B) \ | |
1767 | { \ | |
1768 | gen_##name(ctx, 1); \ | |
1769 | } | |
1770 | #define GEN_PPC64_R4(name, opc1, opc2) \ | |
1771 | GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \ | |
1772 | { \ | |
1773 | gen_##name(ctx, 0, 0); \ | |
1774 | } \ | |
1775 | GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ | |
1776 | PPC_64B) \ | |
1777 | { \ | |
1778 | gen_##name(ctx, 0, 1); \ | |
1779 | } \ | |
1780 | GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ | |
1781 | PPC_64B) \ | |
1782 | { \ | |
1783 | gen_##name(ctx, 1, 0); \ | |
1784 | } \ | |
1785 | GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ | |
1786 | PPC_64B) \ | |
1787 | { \ | |
1788 | gen_##name(ctx, 1, 1); \ | |
1789 | } | |
1790 | ||
1791 | static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb, | |
1792 | uint32_t me, uint32_t sh) | |
1793 | { | |
1794 | if (likely(sh != 0 && mb == 0 && me == (63 - sh))) { | |
1795 | tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); | |
1796 | } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) { | |
1797 | tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb); | |
1798 | } else { | |
1799 | TCGv t0 = tcg_temp_new(); | |
1800 | tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
1801 | if (likely(mb == 0 && me == 63)) { | |
1802 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
1803 | } else { | |
1804 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me)); | |
1805 | } | |
1806 | tcg_temp_free(t0); | |
1807 | } | |
1808 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1809 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1810 | } | |
1811 | /* rldicl - rldicl. */ | |
1812 | static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn) | |
1813 | { | |
1814 | uint32_t sh, mb; | |
1815 | ||
1816 | sh = SH(ctx->opcode) | (shn << 5); | |
1817 | mb = MB(ctx->opcode) | (mbn << 5); | |
1818 | gen_rldinm(ctx, mb, 63, sh); | |
1819 | } | |
1820 | GEN_PPC64_R4(rldicl, 0x1E, 0x00); | |
1821 | /* rldicr - rldicr. */ | |
1822 | static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn) | |
1823 | { | |
1824 | uint32_t sh, me; | |
1825 | ||
1826 | sh = SH(ctx->opcode) | (shn << 5); | |
1827 | me = MB(ctx->opcode) | (men << 5); | |
1828 | gen_rldinm(ctx, 0, me, sh); | |
1829 | } | |
1830 | GEN_PPC64_R4(rldicr, 0x1E, 0x02); | |
1831 | /* rldic - rldic. */ | |
1832 | static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn) | |
1833 | { | |
1834 | uint32_t sh, mb; | |
1835 | ||
1836 | sh = SH(ctx->opcode) | (shn << 5); | |
1837 | mb = MB(ctx->opcode) | (mbn << 5); | |
1838 | gen_rldinm(ctx, mb, 63 - sh, sh); | |
1839 | } | |
1840 | GEN_PPC64_R4(rldic, 0x1E, 0x04); | |
1841 | ||
1842 | static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb, | |
1843 | uint32_t me) | |
1844 | { | |
1845 | TCGv t0; | |
1846 | ||
1849 | t0 = tcg_temp_new(); | |
1850 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f); | |
1851 | tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); | |
1852 | if (unlikely(mb != 0 || me != 63)) { | |
1853 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me)); | |
1854 | } else { | |
1855 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
1856 | } | |
1857 | tcg_temp_free(t0); | |
1858 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1859 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1860 | } | |
1861 | ||
1862 | /* rldcl - rldcl. */ | |
1863 | static always_inline void gen_rldcl (DisasContext *ctx, int mbn) | |
1864 | { | |
1865 | uint32_t mb; | |
1866 | ||
1867 | mb = MB(ctx->opcode) | (mbn << 5); | |
1868 | gen_rldnm(ctx, mb, 63); | |
1869 | } | |
1870 | GEN_PPC64_R2(rldcl, 0x1E, 0x08); | |
1871 | /* rldcr - rldcr. */ | |
1872 | static always_inline void gen_rldcr (DisasContext *ctx, int men) | |
1873 | { | |
1874 | uint32_t me; | |
1875 | ||
1876 | me = MB(ctx->opcode) | (men << 5); | |
1877 | gen_rldnm(ctx, 0, me); | |
1878 | } | |
1879 | GEN_PPC64_R2(rldcr, 0x1E, 0x09); | |
1880 | /* rldimi - rldimi. */ | |
1881 | static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn) | |
1882 | { | |
1883 | uint32_t sh, mb, me; | |
1884 | ||
1885 | sh = SH(ctx->opcode) | (shn << 5); | |
1886 | mb = MB(ctx->opcode) | (mbn << 5); | |
1887 | me = 63 - sh; | |
1888 | if (unlikely(sh == 0 && mb == 0)) { | |
1889 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1890 | } else { | |
1891 | TCGv t0, t1; | |
1892 | target_ulong mask; | |
1893 | ||
1894 | t0 = tcg_temp_new(); | |
1895 | tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
1896 | t1 = tcg_temp_new(); | |
1897 | mask = MASK(mb, me); | |
1898 | tcg_gen_andi_tl(t0, t0, mask); | |
1899 | tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask); | |
1900 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
1901 | tcg_temp_free(t0); | |
1902 | tcg_temp_free(t1); | |
1903 | } | |
1904 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1905 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1906 | } | |
1907 | GEN_PPC64_R4(rldimi, 0x1E, 0x06); | |
1908 | #endif | |
1909 | ||
1910 | /*** Integer shift ***/ | |
1911 | /* slw & slw. */ | |
1912 | GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER) | |
1913 | { | |
1914 | TCGv t0; | |
1915 | int l1, l2; | |
1916 | l1 = gen_new_label(); | |
1917 | l2 = gen_new_label(); | |
1918 | ||
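| /* slw: shift amounts of 32..63 yield zero; otherwise shift left and keep | |
| * only the low 32 bits of the result. */ | |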
1919 | t0 = tcg_temp_local_new(); | |
1920 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f); | |
1921 | tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1); | |
1922 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
1923 | tcg_gen_br(l2); | |
1924 | gen_set_label(l1); | |
1925 | tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0); | |
1926 | tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
1927 | gen_set_label(l2); | |
1928 | tcg_temp_free(t0); | |
1929 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1930 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1931 | } | |
1932 | /* sraw & sraw. */ | |
1933 | GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER) | |
1934 | { | |
1935 | gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], | |
1936 | cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
1937 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1938 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1939 | } | |
1940 | /* srawi & srawi. */ | |
1941 | GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER) | |
1942 | { | |
1943 | int sh = SH(ctx->opcode); | |
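| /* srawi: XER[CA] is set iff the (sign-extended) source is negative and | |
| * any 1 bits are shifted out of the low sh bits; cleared otherwise. */ | |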
1944 | if (sh != 0) { | |
1945 | int l1, l2; | |
1946 | TCGv t0; | |
1947 | l1 = gen_new_label(); | |
1948 | l2 = gen_new_label(); | |
1949 | t0 = tcg_temp_local_new(); | |
1950 | tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]); | |
1951 | tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1); | |
1952 | tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1); | |
1953 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); | |
1954 | tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA); | |
1955 | tcg_gen_br(l2); | |
1956 | gen_set_label(l1); | |
1957 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
1958 | gen_set_label(l2); | |
1959 | tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]); | |
1960 | tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh); | |
1961 | tcg_temp_free(t0); | |
1962 | } else { | |
1963 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
1964 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
1965 | } | |
1966 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1967 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1968 | } | |
1969 | /* srw & srw. */ | |
1970 | GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER) | |
1971 | { | |
1972 | TCGv t0, t1; | |
1973 | int l1, l2; | |
1974 | l1 = gen_new_label(); | |
1975 | l2 = gen_new_label(); | |
1976 | ||
1977 | t0 = tcg_temp_local_new(); | |
1978 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f); | |
1979 | tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1); | |
1980 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
1981 | tcg_gen_br(l2); | |
1982 | gen_set_label(l1); | |
1983 | t1 = tcg_temp_new(); | |
1984 | tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]); | |
1985 | tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0); | |
1986 | tcg_temp_free(t1); | |
1987 | gen_set_label(l2); | |
1988 | tcg_temp_free(t0); | |
1989 | if (unlikely(Rc(ctx->opcode) != 0)) | |
1990 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
1991 | } | |
1992 | #if defined(TARGET_PPC64) | |
1993 | /* sld & sld. */ | |
1994 | GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B) | |
1995 | { | |
1996 | TCGv t0; | |
1997 | int l1, l2; | |
1998 | l1 = gen_new_label(); | |
1999 | l2 = gen_new_label(); | |
2000 | ||
2001 | t0 = tcg_temp_local_new(); | |
2002 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f); | |
2003 | tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1); | |
2004 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
2005 | tcg_gen_br(l2); | |
2006 | gen_set_label(l1); | |
2007 | tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0); | |
2008 | gen_set_label(l2); | |
2009 | tcg_temp_free(t0); | |
2010 | if (unlikely(Rc(ctx->opcode) != 0)) | |
2011 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
2012 | } | |
2013 | /* srad & srad. */ | |
2014 | GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B) | |
2015 | { | |
2016 | gen_helper_srad(cpu_gpr[rA(ctx->opcode)], | |
2017 | cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
2018 | if (unlikely(Rc(ctx->opcode) != 0)) | |
2019 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
2020 | } | |
2021 | /* sradi & sradi. */ | |
2022 | static always_inline void gen_sradi (DisasContext *ctx, int n) | |
2023 | { | |
2024 | int sh = SH(ctx->opcode) + (n << 5); | |
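| /* sradi: the 6-bit shift count is split across the opcode (n supplies | |
| * bit 5); CA is computed as for srawi, but on the full 64-bit source. */ | |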
2025 | if (sh != 0) { | |
2026 | int l1, l2; | |
2027 | TCGv t0; | |
2028 | l1 = gen_new_label(); | |
2029 | l2 = gen_new_label(); | |
2030 | t0 = tcg_temp_local_new(); | |
2031 | tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); | |
2032 | tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1); | |
2033 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); | |
2034 | tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA); | |
2035 | tcg_gen_br(l2); | |
2036 | gen_set_label(l1); | |
2037 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
2038 | gen_set_label(l2); | |
2039 | tcg_temp_free(t0); | |
2040 | tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); | |
2041 | } else { | |
2042 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
2043 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
2044 | } | |
2045 | if (unlikely(Rc(ctx->opcode) != 0)) | |
2046 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
2047 | } | |
2048 | GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B) | |
2049 | { | |
2050 | gen_sradi(ctx, 0); | |
2051 | } | |
2052 | GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B) | |
2053 | { | |
2054 | gen_sradi(ctx, 1); | |
2055 | } | |
2056 | /* srd & srd. */ | |
2057 | GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B) | |
2058 | { | |
2059 | TCGv t0; | |
2060 | int l1, l2; | |
2061 | l1 = gen_new_label(); | |
2062 | l2 = gen_new_label(); | |
2063 | ||
2064 | t0 = tcg_temp_local_new(); | |
2065 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f); | |
2066 | tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1); | |
2067 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
2068 | tcg_gen_br(l2); | |
2069 | gen_set_label(l1); | |
2070 | tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0); | |
2071 | gen_set_label(l2); | |
2072 | tcg_temp_free(t0); | |
2073 | if (unlikely(Rc(ctx->opcode) != 0)) | |
2074 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
2075 | } | |
2076 | #endif | |
2077 | ||
2078 | /*** Floating-Point arithmetic ***/ | |
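| /* These macros expand to one handler per operand pattern (A,C,B; A,B; A,C; | |
| * B). 'isfloat' marks the single-precision variants, which round the result | |
| * with frsp; 'set_fprf' controls whether FPRF is updated in | |
| * gen_compute_fprf(). All of them require the FPU to be enabled. */ | |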
2079 | #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \ | |
2080 | GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \ | |
2081 | { \ | |
2082 | if (unlikely(!ctx->fpu_enabled)) { \ | |
2083 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
2084 | return; \ | |
2085 | } \ | |
2086 | /* NIP cannot be restored if the memory exception comes from a helper */ \ | |
2087 | gen_update_nip(ctx, ctx->nip - 4); \ | |
2088 | gen_reset_fpstatus(); \ | |
2089 | gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \ | |
2090 | cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \ | |
2091 | if (isfloat) { \ | |
2092 | gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \ | |
2093 | } \ | |
2094 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \ | |
2095 | Rc(ctx->opcode) != 0); \ | |
2096 | } | |
2097 | ||
2098 | #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \ | |
2099 | _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \ | |
2100 | _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type); | |
2101 | ||
2102 | #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \ | |
2103 | GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \ | |
2104 | { \ | |
2105 | if (unlikely(!ctx->fpu_enabled)) { \ | |
2106 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
2107 | return; \ | |
2108 | } \ | |
2109 | /* NIP cannot be restored if the memory exception comes from a helper */ \ | |
2110 | gen_update_nip(ctx, ctx->nip - 4); \ | |
2111 | gen_reset_fpstatus(); \ | |
2112 | gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \ | |
2113 | cpu_fpr[rB(ctx->opcode)]); \ | |
2114 | if (isfloat) { \ | |
2115 | gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \ | |
2116 | } \ | |
2117 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \ | |
2118 | set_fprf, Rc(ctx->opcode) != 0); \ | |
2119 | } | |
2120 | #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \ | |
2121 | _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \ | |
2122 | _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type); | |
2123 | ||
2124 | #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \ | |
2125 | GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \ | |
2126 | { \ | |
2127 | if (unlikely(!ctx->fpu_enabled)) { \ | |
2128 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
2129 | return; \ | |
2130 | } \ | |
2131 | /* NIP cannot be restored if the memory exception comes from a helper */ \ | |
2132 | gen_update_nip(ctx, ctx->nip - 4); \ | |
2133 | gen_reset_fpstatus(); \ | |
2134 | gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \ | |
2135 | cpu_fpr[rC(ctx->opcode)]); \ | |
2136 | if (isfloat) { \ | |
2137 | gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \ | |
2138 | } \ | |
2139 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \ | |
2140 | set_fprf, Rc(ctx->opcode) != 0); \ | |
2141 | } | |
2142 | #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \ | |
2143 | _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \ | |
2144 | _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type); | |
2145 | ||
2146 | #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \ | |
2147 | GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \ | |
2148 | { \ | |
2149 | if (unlikely(!ctx->fpu_enabled)) { \ | |
2150 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
2151 | return; \ | |
2152 | } \ | |
2153 | /* NIP cannot be restored if the memory exception comes from a helper */ \ | |
2154 | gen_update_nip(ctx, ctx->nip - 4); \ | |
2155 | gen_reset_fpstatus(); \ | |
2156 | gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \ | |
2157 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \ | |
2158 | set_fprf, Rc(ctx->opcode) != 0); \ | |
2159 | } | |
2160 | ||
2161 | #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \ | |
2162 | GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \ | |
2163 | { \ | |
2164 | if (unlikely(!ctx->fpu_enabled)) { \ | |
2165 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
2166 | return; \ | |
2167 | } \ | |
2168 | /* NIP cannot be restored if the memory exception comes from a helper */ \ | |
2169 | gen_update_nip(ctx, ctx->nip - 4); \ | |
2170 | gen_reset_fpstatus(); \ | |
2171 | gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \ | |
2172 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \ | |
2173 | set_fprf, Rc(ctx->opcode) != 0); \ | |
2174 | } | |
2175 | ||
2176 | /* fadd - fadds */ | |
2177 | GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT); | |
2178 | /* fdiv - fdivs */ | |
2179 | GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT); | |
2180 | /* fmul - fmuls */ | |
2181 | GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT); | |
2182 | ||
2183 | /* fre */ | |
2184 | GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT); | |
2185 | ||
2186 | /* fres */ | |
2187 | GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES); | |
2188 | ||
2189 | /* frsqrte */ | |
2190 | GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE); | |
2191 | ||
2192 | /* frsqrtes */ | |
2193 | GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES) | |
2194 | { | |
2195 | if (unlikely(!ctx->fpu_enabled)) { | |
2196 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2197 | return; | |
2198 | } | |
2199 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2200 | gen_update_nip(ctx, ctx->nip - 4); | |
2201 | gen_reset_fpstatus(); | |
2202 | gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); | |
2203 | gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); | |
2204 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0); | |
2205 | } | |
2206 | ||
2207 | /* fsel */ | |
2208 | _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL); | |
2209 | /* fsub - fsubs */ | |
2210 | GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT); | |
2211 | /* Optional: */ | |
2212 | /* fsqrt */ | |
2213 | GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT) | |
2214 | { | |
2215 | if (unlikely(!ctx->fpu_enabled)) { | |
2216 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2217 | return; | |
2218 | } | |
2219 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2220 | gen_update_nip(ctx, ctx->nip - 4); | |
2221 | gen_reset_fpstatus(); | |
2222 | gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); | |
2223 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0); | |
2224 | } | |
2225 | ||
2226 | GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT) | |
2227 | { | |
2228 | if (unlikely(!ctx->fpu_enabled)) { | |
2229 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2230 | return; | |
2231 | } | |
2232 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2233 | gen_update_nip(ctx, ctx->nip - 4); | |
2234 | gen_reset_fpstatus(); | |
2235 | gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); | |
2236 | gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); | |
2237 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0); | |
2238 | } | |
2239 | ||
2240 | /*** Floating-Point multiply-and-add ***/ | |
2241 | /* fmadd - fmadds */ | |
2242 | GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT); | |
2243 | /* fmsub - fmsubs */ | |
2244 | GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT); | |
2245 | /* fnmadd - fnmadds */ | |
2246 | GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT); | |
2247 | /* fnmsub - fnmsubs */ | |
2248 | GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT); | |
2249 | ||
2250 | /*** Floating-Point round & convert ***/ | |
2251 | /* fctiw */ | |
2252 | GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT); | |
2253 | /* fctiwz */ | |
2254 | GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT); | |
2255 | /* frsp */ | |
2256 | GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT); | |
2257 | #if defined(TARGET_PPC64) | |
2258 | /* fcfid */ | |
2259 | GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B); | |
2260 | /* fctid */ | |
2261 | GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B); | |
2262 | /* fctidz */ | |
2263 | GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B); | |
2264 | #endif | |
2265 | ||
2266 | /* frin */ | |
2267 | GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT); | |
2268 | /* friz */ | |
2269 | GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT); | |
2270 | /* frip */ | |
2271 | GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT); | |
2272 | /* frim */ | |
2273 | GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT); | |
2274 | ||
2275 | /*** Floating-Point compare ***/ | |
2276 | /* fcmpo */ | |
2277 | GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT) | |
2278 | { | |
2279 | TCGv_i32 crf; | |
2280 | if (unlikely(!ctx->fpu_enabled)) { | |
2281 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2282 | return; | |
2283 | } | |
2284 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2285 | gen_update_nip(ctx, ctx->nip - 4); | |
2286 | gen_reset_fpstatus(); | |
2287 | crf = tcg_const_i32(crfD(ctx->opcode)); | |
2288 | gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf); | |
2289 | tcg_temp_free_i32(crf); | |
2290 | gen_helper_float_check_status(); | |
2291 | } | |
2292 | ||
2293 | /* fcmpu */ | |
2294 | GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT) | |
2295 | { | |
2296 | TCGv_i32 crf; | |
2297 | if (unlikely(!ctx->fpu_enabled)) { | |
2298 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2299 | return; | |
2300 | } | |
2301 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2302 | gen_update_nip(ctx, ctx->nip - 4); | |
2303 | gen_reset_fpstatus(); | |
2304 | crf = tcg_const_i32(crfD(ctx->opcode)); | |
2305 | gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf); | |
2306 | tcg_temp_free_i32(crf); | |
2307 | gen_helper_float_check_status(); | |
2308 | } | |
2309 | ||
2310 | /*** Floating-point move ***/ | |
2311 | /* fabs */ | |
2312 | /* XXX: beware that fabs never checks for NaNs nor updates FPSCR */ | |
2313 | GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT); | |
2314 | ||
2315 | /* fmr - fmr. */ | |
2316 | /* XXX: beware that fmr never checks for NaNs nor updates FPSCR */ | |
2317 | GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT) | |
2318 | { | |
2319 | if (unlikely(!ctx->fpu_enabled)) { | |
2320 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2321 | return; | |
2322 | } | |
2323 | tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); | |
2324 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0); | |
2325 | } | |
2326 | ||
2327 | /* fnabs */ | |
2328 | /* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */ | |
2329 | GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT); | |
2330 | /* fneg */ | |
2331 | /* XXX: beware that fneg never checks for NaNs nor updates FPSCR */ | |
2332 | GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT); | |
2333 | ||
2334 | /*** Floating-Point status & ctrl register ***/ | |
2335 | /* mcrfs */ | |
2336 | GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT) | |
2337 | { | |
2338 | int bfa; | |
2339 | ||
2340 | if (unlikely(!ctx->fpu_enabled)) { | |
2341 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2342 | return; | |
2343 | } | |
2344 | bfa = 4 * (7 - crfS(ctx->opcode)); | |
2345 | tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa); | |
2346 | tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf); | |
2347 | tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa)); | |
2348 | } | |
2349 | ||
2350 | /* mffs */ | |
2351 | GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT) | |
2352 | { | |
2353 | if (unlikely(!ctx->fpu_enabled)) { | |
2354 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2355 | return; | |
2356 | } | |
2357 | gen_reset_fpstatus(); | |
2358 | tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr); | |
2359 | gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0); | |
2360 | } | |
2361 | ||
2362 | /* mtfsb0 */ | |
2363 | GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT) | |
2364 | { | |
2365 | uint8_t crb; | |
2366 | ||
2367 | if (unlikely(!ctx->fpu_enabled)) { | |
2368 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2369 | return; | |
2370 | } | |
2371 | crb = 31 - crbD(ctx->opcode); | |
2372 | gen_reset_fpstatus(); | |
2373 | if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) { | |
2374 | TCGv_i32 t0; | |
2375 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2376 | gen_update_nip(ctx, ctx->nip - 4); | |
2377 | t0 = tcg_const_i32(crb); | |
2378 | gen_helper_fpscr_clrbit(t0); | |
2379 | tcg_temp_free_i32(t0); | |
2380 | } | |
2381 | if (unlikely(Rc(ctx->opcode) != 0)) { | |
2382 | tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX); | |
2383 | } | |
2384 | } | |
2385 | ||
2386 | /* mtfsb1 */ | |
2387 | GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT) | |
2388 | { | |
2389 | uint8_t crb; | |
2390 | ||
2391 | if (unlikely(!ctx->fpu_enabled)) { | |
2392 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2393 | return; | |
2394 | } | |
2395 | crb = 31 - crbD(ctx->opcode); | |
2396 | gen_reset_fpstatus(); | |
2397 | /* XXX: we pretend we can only do IEEE floating-point computations */ | |
2398 | if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) { | |
2399 | TCGv_i32 t0; | |
2400 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2401 | gen_update_nip(ctx, ctx->nip - 4); | |
2402 | t0 = tcg_const_i32(crb); | |
2403 | gen_helper_fpscr_setbit(t0); | |
2404 | tcg_temp_free_i32(t0); | |
2405 | } | |
2406 | if (unlikely(Rc(ctx->opcode) != 0)) { | |
2407 | tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX); | |
2408 | } | |
2409 | /* We can raise a deferred exception */ | |
2410 | gen_helper_float_check_status(); | |
2411 | } | |
2412 | ||
2413 | /* mtfsf */ | |
2414 | GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT) | |
2415 | { | |
2416 | TCGv_i32 t0; | |
2417 | ||
2418 | if (unlikely(!ctx->fpu_enabled)) { | |
2419 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2420 | return; | |
2421 | } | |
2422 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2423 | gen_update_nip(ctx, ctx->nip - 4); | |
2424 | gen_reset_fpstatus(); | |
2425 | t0 = tcg_const_i32(FM(ctx->opcode)); | |
2426 | gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0); | |
2427 | tcg_temp_free_i32(t0); | |
2428 | if (unlikely(Rc(ctx->opcode) != 0)) { | |
2429 | tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX); | |
2430 | } | |
2431 | /* We can raise a deferred exception */ | |
2432 | gen_helper_float_check_status(); | |
2433 | } | |
2434 | ||
2435 | /* mtfsfi */ | |
2436 | GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT) | |
2437 | { | |
2438 | int bf, sh; | |
2439 | TCGv_i64 t0; | |
2440 | TCGv_i32 t1; | |
2441 | ||
2442 | if (unlikely(!ctx->fpu_enabled)) { | |
2443 | gen_exception(ctx, POWERPC_EXCP_FPU); | |
2444 | return; | |
2445 | } | |
2446 | bf = crbD(ctx->opcode) >> 2; | |
2447 | sh = 7 - bf; | |
2448 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2449 | gen_update_nip(ctx, ctx->nip - 4); | |
2450 | gen_reset_fpstatus(); | |
2451 | t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh)); | |
2452 | t1 = tcg_const_i32(1 << sh); | |
2453 | gen_helper_store_fpscr(t0, t1); | |
2454 | tcg_temp_free_i64(t0); | |
2455 | tcg_temp_free_i32(t1); | |
2456 | if (unlikely(Rc(ctx->opcode) != 0)) { | |
2457 | tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX); | |
2458 | } | |
2459 | /* We can raise a deferred exception */ | |
2460 | gen_helper_float_check_status(); | |
2461 | } | |
2462 | ||
2463 | /*** Addressing modes ***/ | |
2464 | /* Register indirect with immediate index: EA = (rA|0) + SIMM */ | |
2465 | static always_inline void gen_addr_imm_index (DisasContext *ctx, TCGv EA, target_long maskl) | |
2466 | { | |
2467 | target_long simm = SIMM(ctx->opcode); | |
2468 | ||
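| /* maskl clears the low immediate bits that DS/DQ-form instructions reuse | |
| * as opcode bits (0x03 for ld/std, 0x0F for lq). */ | |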
2469 | simm &= ~maskl; | |
2470 | if (rA(ctx->opcode) == 0) { | |
2471 | #if defined(TARGET_PPC64) | |
2472 | if (!ctx->sf_mode) { | |
2473 | tcg_gen_movi_tl(EA, (uint32_t)simm); | |
2474 | } else | |
2475 | #endif | |
2476 | tcg_gen_movi_tl(EA, simm); | |
2477 | } else if (likely(simm != 0)) { | |
2478 | tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); | |
2479 | #if defined(TARGET_PPC64) | |
2480 | if (!ctx->sf_mode) { | |
2481 | tcg_gen_ext32u_tl(EA, EA); | |
2482 | } | |
2483 | #endif | |
2484 | } else { | |
2485 | #if defined(TARGET_PPC64) | |
2486 | if (!ctx->sf_mode) { | |
2487 | tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); | |
2488 | } else | |
2489 | #endif | |
2490 | tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); | |
2491 | } | |
2492 | } | |
2493 | ||
2494 | static always_inline void gen_addr_reg_index (DisasContext *ctx, TCGv EA) | |
2495 | { | |
2496 | if (rA(ctx->opcode) == 0) { | |
2497 | #if defined(TARGET_PPC64) | |
2498 | if (!ctx->sf_mode) { | |
2499 | tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); | |
2500 | } else | |
2501 | #endif | |
2502 | tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); | |
2503 | } else { | |
2504 | tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
2505 | #if defined(TARGET_PPC64) | |
2506 | if (!ctx->sf_mode) { | |
2507 | tcg_gen_ext32u_tl(EA, EA); | |
2508 | } | |
2509 | #endif | |
2510 | } | |
2511 | } | |
2512 | ||
2513 | static always_inline void gen_addr_register (DisasContext *ctx, TCGv EA) | |
2514 | { | |
2515 | if (rA(ctx->opcode) == 0) { | |
2516 | tcg_gen_movi_tl(EA, 0); | |
2517 | } else { | |
2518 | #if defined(TARGET_PPC64) | |
2519 | if (!ctx->sf_mode) { | |
2520 | tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); | |
2521 | } else | |
2522 | #endif | |
2523 | tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); | |
2524 | } | |
2525 | } | |
2526 | ||
2527 | static always_inline void gen_addr_add (DisasContext *ctx, TCGv ret, TCGv arg1, target_long val) | |
2528 | { | |
2529 | tcg_gen_addi_tl(ret, arg1, val); | |
2530 | #if defined(TARGET_PPC64) | |
2531 | if (!ctx->sf_mode) { | |
2532 | tcg_gen_ext32u_tl(ret, ret); | |
2533 | } | |
2534 | #endif | |
2535 | } | |
2536 | ||
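| /* Raise an alignment exception when EA has any of the low 'mask' bits set. */ | |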
2537 | static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask) | |
2538 | { | |
2539 | int l1 = gen_new_label(); | |
2540 | TCGv t0 = tcg_temp_new(); | |
2541 | TCGv_i32 t1, t2; | |
2542 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
2543 | gen_update_nip(ctx, ctx->nip - 4); | |
2544 | tcg_gen_andi_tl(t0, EA, mask); | |
2545 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); | |
2546 | t1 = tcg_const_i32(POWERPC_EXCP_ALIGN); | |
2547 | t2 = tcg_const_i32(0); | |
2548 | gen_helper_raise_exception_err(t1, t2); | |
2549 | tcg_temp_free_i32(t1); | |
2550 | tcg_temp_free_i32(t2); | |
2551 | gen_set_label(l1); | |
2552 | tcg_temp_free(t0); | |
2553 | } | |
2554 | ||
2555 | /*** Integer load ***/ | |
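| /* The gen_qemu_* wrappers add a byte swap when the CPU is running in | |
| * little-endian mode, since the underlying tcg_gen_qemu_ld/st accesses use | |
| * the target's default (big-endian) byte order. */ | |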
2556 | static always_inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2557 | { | |
2558 | tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx); | |
2559 | } | |
2560 | ||
2561 | static always_inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2562 | { | |
2563 | tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx); | |
2564 | } | |
2565 | ||
2566 | static always_inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2567 | { | |
2568 | tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx); | |
2569 | if (unlikely(ctx->le_mode)) { | |
2570 | #if defined(TARGET_PPC64) | |
2571 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
2572 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2573 | tcg_gen_bswap16_i32(t0, t0); | |
2574 | tcg_gen_extu_i32_tl(arg1, t0); | |
2575 | tcg_temp_free_i32(t0); | |
2576 | #else | |
2577 | tcg_gen_bswap16_i32(arg1, arg1); | |
2578 | #endif | |
2579 | } | |
2580 | } | |
2581 | ||
2582 | static always_inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2583 | { | |
2584 | if (unlikely(ctx->le_mode)) { | |
2585 | #if defined(TARGET_PPC64) | |
2586 | TCGv_i32 t0; | |
2587 | tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx); | |
2588 | t0 = tcg_temp_new_i32(); | |
2589 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2590 | tcg_gen_bswap16_i32(t0, t0); | |
2591 | tcg_gen_extu_i32_tl(arg1, t0); | |
2592 | tcg_gen_ext16s_tl(arg1, arg1); | |
2593 | tcg_temp_free_i32(t0); | |
2594 | #else | |
2595 | tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx); | |
2596 | tcg_gen_bswap16_i32(arg1, arg1); | |
2597 | tcg_gen_ext16s_i32(arg1, arg1); | |
2598 | #endif | |
2599 | } else { | |
2600 | tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx); | |
2601 | } | |
2602 | } | |
2603 | ||
2604 | static always_inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2605 | { | |
2606 | tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx); | |
2607 | if (unlikely(ctx->le_mode)) { | |
2608 | #if defined(TARGET_PPC64) | |
2609 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
2610 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2611 | tcg_gen_bswap_i32(t0, t0); | |
2612 | tcg_gen_extu_i32_tl(arg1, t0); | |
2613 | tcg_temp_free_i32(t0); | |
2614 | #else | |
2615 | tcg_gen_bswap_i32(arg1, arg1); | |
2616 | #endif | |
2617 | } | |
2618 | } | |
2619 | ||
2620 | #if defined(TARGET_PPC64) | |
2621 | static always_inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2622 | { | |
2623 | if (unlikely(ctx->mem_idx)) { | |
2624 | TCGv_i32 t0; | |
2625 | tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx); | |
2626 | t0 = tcg_temp_new_i32(); | |
2627 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2628 | tcg_gen_bswap_i32(t0, t0); | |
2629 | tcg_gen_ext_i32_tl(arg1, t0); | |
2630 | tcg_temp_free_i32(t0); | |
2631 | } else | |
2632 | tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx); | |
2633 | } | |
2634 | #endif | |
2635 | ||
2636 | static always_inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) | |
2637 | { | |
2638 | tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx); | |
2639 | if (unlikely(ctx->le_mode)) { | |
2640 | tcg_gen_bswap_i64(arg1, arg1); | |
2641 | } | |
2642 | } | |
2643 | ||
2644 | static always_inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2645 | { | |
2646 | tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx); | |
2647 | } | |
2648 | ||
2649 | static always_inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2650 | { | |
2651 | if (unlikely(ctx->le_mode)) { | |
2652 | #if defined(TARGET_PPC64) | |
2653 | TCGv_i32 t0; | |
2654 | TCGv t1; | |
2655 | t0 = tcg_temp_new_i32(); | |
2656 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2657 | tcg_gen_ext16u_i32(t0, t0); | |
2658 | tcg_gen_bswap16_i32(t0, t0); | |
2659 | t1 = tcg_temp_new(); | |
2660 | tcg_gen_extu_i32_tl(t1, t0); | |
2661 | tcg_temp_free_i32(t0); | |
2662 | tcg_gen_qemu_st16(t1, arg2, ctx->mem_idx); | |
2663 | tcg_temp_free(t1); | |
2664 | #else | |
2665 | TCGv t0 = tcg_temp_new(); | |
2666 | tcg_gen_ext16u_tl(t0, arg1); | |
2667 | tcg_gen_bswap16_i32(t0, t0); | |
2668 | tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx); | |
2669 | tcg_temp_free(t0); | |
2670 | #endif | |
2671 | } else { | |
2672 | tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx); | |
2673 | } | |
2674 | } | |
2675 | ||
2676 | static always_inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2677 | { | |
2678 | if (unlikely(ctx->le_mode)) { | |
2679 | #if defined(TARGET_PPC64) | |
2680 | TCGv_i32 t0; | |
2681 | TCGv t1; | |
2682 | t0 = tcg_temp_new_i32(); | |
2683 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2684 | tcg_gen_bswap_i32(t0, t0); | |
2685 | t1 = tcg_temp_new(); | |
2686 | tcg_gen_extu_i32_tl(t1, t0); | |
2687 | tcg_temp_free_i32(t0); | |
2688 | tcg_gen_qemu_st32(t1, arg2, ctx->mem_idx); | |
2689 | tcg_temp_free(t1); | |
2690 | #else | |
2691 | TCGv t0 = tcg_temp_new_i32(); | |
2692 | tcg_gen_bswap_i32(t0, arg1); | |
2693 | tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx); | |
2694 | tcg_temp_free(t0); | |
2695 | #endif | |
2696 | } else { | |
2697 | tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx); | |
2698 | } | |
2699 | } | |
2700 | ||
2701 | static always_inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) | |
2702 | { | |
2703 | if (unlikely(ctx->le_mode)) { | |
2704 | TCGv_i64 t0 = tcg_temp_new_i64(); | |
2705 | tcg_gen_bswap_i64(t0, arg1); | |
2706 | tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx); | |
2707 | tcg_temp_free_i64(t0); | |
2708 | } else | |
2709 | tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx); | |
2710 | } | |
2711 | ||
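| /* Each integer load comes in four forms: D-form (GEN_LD), D-form with | |
| * update (GEN_LDU), X-form with update (GEN_LDUX) and X-form (GEN_LDX). | |
| * The update forms are invalid when rA is 0 or rA equals rD. */ | |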
2712 | #define GEN_LD(name, ldop, opc, type) \ | |
2713 | GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
2714 | { \ | |
2715 | TCGv EA; \ | |
2716 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2717 | EA = tcg_temp_new(); \ | |
2718 | gen_addr_imm_index(ctx, EA, 0); \ | |
2719 | gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ | |
2720 | tcg_temp_free(EA); \ | |
2721 | } | |
2722 | ||
2723 | #define GEN_LDU(name, ldop, opc, type) \ | |
2724 | GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
2725 | { \ | |
2726 | TCGv EA; \ | |
2727 | if (unlikely(rA(ctx->opcode) == 0 || \ | |
2728 | rA(ctx->opcode) == rD(ctx->opcode))) { \ | |
2729 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
2730 | return; \ | |
2731 | } \ | |
2732 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2733 | EA = tcg_temp_new(); \ | |
2734 | if (type == PPC_64B) \ | |
2735 | gen_addr_imm_index(ctx, EA, 0x03); \ | |
2736 | else \ | |
2737 | gen_addr_imm_index(ctx, EA, 0); \ | |
2738 | gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ | |
2739 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
2740 | tcg_temp_free(EA); \ | |
2741 | } | |
2742 | ||
2743 | #define GEN_LDUX(name, ldop, opc2, opc3, type) \ | |
2744 | GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \ | |
2745 | { \ | |
2746 | TCGv EA; \ | |
2747 | if (unlikely(rA(ctx->opcode) == 0 || \ | |
2748 | rA(ctx->opcode) == rD(ctx->opcode))) { \ | |
2749 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
2750 | return; \ | |
2751 | } \ | |
2752 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2753 | EA = tcg_temp_new(); \ | |
2754 | gen_addr_reg_index(ctx, EA); \ | |
2755 | gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ | |
2756 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
2757 | tcg_temp_free(EA); \ | |
2758 | } | |
2759 | ||
2760 | #define GEN_LDX(name, ldop, opc2, opc3, type) \ | |
2761 | GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ | |
2762 | { \ | |
2763 | TCGv EA; \ | |
2764 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2765 | EA = tcg_temp_new(); \ | |
2766 | gen_addr_reg_index(ctx, EA); \ | |
2767 | gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ | |
2768 | tcg_temp_free(EA); \ | |
2769 | } | |
2770 | ||
2771 | #define GEN_LDS(name, ldop, op, type) \ | |
2772 | GEN_LD(name, ldop, op | 0x20, type); \ | |
2773 | GEN_LDU(name, ldop, op | 0x21, type); \ | |
2774 | GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ | |
2775 | GEN_LDX(name, ldop, 0x17, op | 0x00, type) | |
2776 | ||
2777 | /* lbz lbzu lbzux lbzx */ | |
2778 | GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); | |
2779 | /* lha lhau lhaux lhax */ | |
2780 | GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); | |
2781 | /* lhz lhzu lhzux lhzx */ | |
2782 | GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); | |
2783 | /* lwz lwzu lwzux lwzx */ | |
2784 | GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); | |
2785 | #if defined(TARGET_PPC64) | |
2786 | /* lwaux */ | |
2787 | GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); | |
2788 | /* lwax */ | |
2789 | GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); | |
2790 | /* ldux */ | |
2791 | GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B); | |
2792 | /* ldx */ | |
2793 | GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B); | |
2794 | GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B) | |
2795 | { | |
2796 | TCGv EA; | |
2797 | if (Rc(ctx->opcode)) { | |
2798 | if (unlikely(rA(ctx->opcode) == 0 || | |
2799 | rA(ctx->opcode) == rD(ctx->opcode))) { | |
2800 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
2801 | return; | |
2802 | } | |
2803 | } | |
2804 | gen_set_access_type(ctx, ACCESS_INT); | |
2805 | EA = tcg_temp_new(); | |
2806 | gen_addr_imm_index(ctx, EA, 0x03); | |
2807 | if (ctx->opcode & 0x02) { | |
2808 | /* lwa (lwau is undefined) */ | |
2809 | gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); | |
2810 | } else { | |
2811 | /* ld - ldu */ | |
2812 | gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA); | |
2813 | } | |
2814 | if (Rc(ctx->opcode)) | |
2815 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); | |
2816 | tcg_temp_free(EA); | |
2817 | } | |
2818 | /* lq */ | |
2819 | GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX) | |
2820 | { | |
2821 | #if defined(CONFIG_USER_ONLY) | |
2822 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
2823 | #else | |
2824 | int ra, rd; | |
2825 | TCGv EA; | |
2826 | ||
2827 | /* lq is rejected in user mode (treated as a privileged instruction here) */ | |
2828 | if (unlikely(ctx->mem_idx == 0)) { | |
2829 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
2830 | return; | |
2831 | } | |
2832 | ra = rA(ctx->opcode); | |
2833 | rd = rD(ctx->opcode); | |
2834 | if (unlikely((rd & 1) || rd == ra)) { | |
2835 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
2836 | return; | |
2837 | } | |
2838 | if (unlikely(ctx->le_mode)) { | |
2839 | /* Little-endian mode is not handled */ | |
2840 | gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE); | |
2841 | return; | |
2842 | } | |
2843 | gen_set_access_type(ctx, ACCESS_INT); | |
2844 | EA = tcg_temp_new(); | |
2845 | gen_addr_imm_index(ctx, EA, 0x0F); | |
2846 | gen_qemu_ld64(ctx, cpu_gpr[rd], EA); | |
2847 | gen_addr_add(ctx, EA, EA, 8); | |
2848 | gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA); | |
2849 | tcg_temp_free(EA); | |
2850 | #endif | |
2851 | } | |
2852 | #endif | |
2853 | ||
2854 | /*** Integer store ***/ | |
2855 | #define GEN_ST(name, stop, opc, type) \ | |
2856 | GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
2857 | { \ | |
2858 | TCGv EA; \ | |
2859 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2860 | EA = tcg_temp_new(); \ | |
2861 | gen_addr_imm_index(ctx, EA, 0); \ | |
2862 | gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ | |
2863 | tcg_temp_free(EA); \ | |
2864 | } | |
2865 | ||
2866 | #define GEN_STU(name, stop, opc, type) \ | |
2867 | GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
2868 | { \ | |
2869 | TCGv EA; \ | |
2870 | if (unlikely(rA(ctx->opcode) == 0)) { \ | |
2871 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
2872 | return; \ | |
2873 | } \ | |
2874 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2875 | EA = tcg_temp_new(); \ | |
2876 | if (type == PPC_64B) \ | |
2877 | gen_addr_imm_index(ctx, EA, 0x03); \ | |
2878 | else \ | |
2879 | gen_addr_imm_index(ctx, EA, 0); \ | |
2880 | gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ | |
2881 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
2882 | tcg_temp_free(EA); \ | |
2883 | } | |
2884 | ||
2885 | #define GEN_STUX(name, stop, opc2, opc3, type) \ | |
2886 | GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \ | |
2887 | { \ | |
2888 | TCGv EA; \ | |
2889 | if (unlikely(rA(ctx->opcode) == 0)) { \ | |
2890 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
2891 | return; \ | |
2892 | } \ | |
2893 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2894 | EA = tcg_temp_new(); \ | |
2895 | gen_addr_reg_index(ctx, EA); \ | |
2896 | gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ | |
2897 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
2898 | tcg_temp_free(EA); \ | |
2899 | } | |
2900 | ||
2901 | #define GEN_STX(name, stop, opc2, opc3, type) \ | |
2902 | GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ | |
2903 | { \ | |
2904 | TCGv EA; \ | |
2905 | gen_set_access_type(ctx, ACCESS_INT); \ | |
2906 | EA = tcg_temp_new(); \ | |
2907 | gen_addr_reg_index(ctx, EA); \ | |
2908 | gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ | |
2909 | tcg_temp_free(EA); \ | |
2910 | } | |
2911 | ||
2912 | #define GEN_STS(name, stop, op, type) \ | |
2913 | GEN_ST(name, stop, op | 0x20, type); \ | |
2914 | GEN_STU(name, stop, op | 0x21, type); \ | |
2915 | GEN_STUX(name, stop, 0x17, op | 0x01, type); \ | |
2916 | GEN_STX(name, stop, 0x17, op | 0x00, type) | |
2917 | ||
2918 | /* stb stbu stbux stbx */ | |
2919 | GEN_STS(stb, st8, 0x06, PPC_INTEGER); | |
2920 | /* sth sthu sthux sthx */ | |
2921 | GEN_STS(sth, st16, 0x0C, PPC_INTEGER); | |
2922 | /* stw stwu stwux stwx */ | |
2923 | GEN_STS(stw, st32, 0x04, PPC_INTEGER); | |
2924 | #if defined(TARGET_PPC64) | |
2925 | GEN_STUX(std, st64, 0x15, 0x05, PPC_64B); | |
2926 | GEN_STX(std, st64, 0x15, 0x04, PPC_64B); | |
2927 | GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B) | |
2928 | { | |
2929 | int rs; | |
2930 | TCGv EA; | |
2931 | ||
2932 | rs = rS(ctx->opcode); | |
2933 | if ((ctx->opcode & 0x3) == 0x2) { | |
2934 | #if defined(CONFIG_USER_ONLY) | |
2935 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
2936 | #else | |
2937 | /* stq */ | |
2938 | if (unlikely(ctx->mem_idx == 0)) { | |
2939 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
2940 | return; | |
2941 | } | |
2942 | if (unlikely(rs & 1)) { | |
2943 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
2944 | return; | |
2945 | } | |
2946 | if (unlikely(ctx->le_mode)) { | |
2947 | /* Little-endian mode is not handled */ | |
2948 | gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE); | |
2949 | return; | |
2950 | } | |
2951 | gen_set_access_type(ctx, ACCESS_INT); | |
2952 | EA = tcg_temp_new(); | |
2953 | gen_addr_imm_index(ctx, EA, 0x03); | |
2954 | gen_qemu_st64(ctx, cpu_gpr[rs], EA); | |
2955 | gen_addr_add(ctx, EA, EA, 8); | |
2956 | gen_qemu_st64(ctx, cpu_gpr[rs+1], EA); | |
2957 | tcg_temp_free(EA); | |
2958 | #endif | |
2959 | } else { | |
2960 | /* std / stdu */ | |
2961 | if (Rc(ctx->opcode)) { | |
2962 | if (unlikely(rA(ctx->opcode) == 0)) { | |
2963 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
2964 | return; | |
2965 | } | |
2966 | } | |
2967 | gen_set_access_type(ctx, ACCESS_INT); | |
2968 | EA = tcg_temp_new(); | |
2969 | gen_addr_imm_index(ctx, EA, 0x03); | |
2970 | gen_qemu_st64(ctx, cpu_gpr[rs], EA); | |
2971 | if (Rc(ctx->opcode)) | |
2972 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); | |
2973 | tcg_temp_free(EA); | |
2974 | } | |
2975 | } | |
2976 | #endif | |
2977 | /*** Integer load and store with byte reverse ***/ | |
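| /* The byte-reverse forms swap in the opposite case from the normal | |
| * accesses: they swap when the CPU is big-endian and are straight | |
| * loads/stores when it is little-endian. */ | |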
2978 | /* lhbrx */ | |
2979 | static always_inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2980 | { | |
2981 | tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx); | |
2982 | if (likely(!ctx->le_mode)) { | |
2983 | #if defined(TARGET_PPC64) | |
2984 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
2985 | tcg_gen_trunc_tl_i32(t0, arg1); | |
2986 | tcg_gen_bswap16_i32(t0, t0); | |
2987 | tcg_gen_extu_i32_tl(arg1, t0); | |
2988 | tcg_temp_free_i32(t0); | |
2989 | #else | |
2990 | tcg_gen_bswap16_i32(arg1, arg1); | |
2991 | #endif | |
2992 | } | |
2993 | } | |
2994 | GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); | |
2995 | ||
2996 | /* lwbrx */ | |
2997 | static always_inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
2998 | { | |
2999 | tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx); | |
3000 | if (likely(!ctx->le_mode)) { | |
3001 | #if defined(TARGET_PPC64) | |
3002 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
3003 | tcg_gen_trunc_tl_i32(t0, arg1); | |
3004 | tcg_gen_bswap_i32(t0, t0); | |
3005 | tcg_gen_extu_i32_tl(arg1, t0); | |
3006 | tcg_temp_free_i32(t0); | |
3007 | #else | |
3008 | tcg_gen_bswap_i32(arg1, arg1); | |
3009 | #endif | |
3010 | } | |
3011 | } | |
3012 | GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); | |
3013 | ||
3014 | /* sthbrx */ | |
3015 | static always_inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
3016 | { | |
3017 | if (likely(!ctx->le_mode)) { | |
3018 | #if defined(TARGET_PPC64) | |
3019 | TCGv_i32 t0; | |
3020 | TCGv t1; | |
3021 | t0 = tcg_temp_new_i32(); | |
3022 | tcg_gen_trunc_tl_i32(t0, arg1); | |
3023 | tcg_gen_ext16u_i32(t0, t0); | |
3024 | tcg_gen_bswap16_i32(t0, t0); | |
3025 | t1 = tcg_temp_new(); | |
3026 | tcg_gen_extu_i32_tl(t1, t0); | |
3027 | tcg_temp_free_i32(t0); | |
3028 | tcg_gen_qemu_st16(t1, arg2, ctx->mem_idx); | |
3029 | tcg_temp_free(t1); | |
3030 | #else | |
3031 | TCGv t0 = tcg_temp_new(); | |
3032 | tcg_gen_ext16u_tl(t0, arg1); | |
3033 | tcg_gen_bswap16_i32(t0, t0); | |
3034 | tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx); | |
3035 | tcg_temp_free(t0); | |
3036 | #endif | |
3037 | } else { | |
3038 | tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx); | |
3039 | } | |
3040 | } | |
3041 | GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); | |
3042 | ||
3043 | /* stwbrx */ | |
3044 | static always_inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2) | |
3045 | { | |
3046 | if (likely(!ctx->le_mode)) { | |
3047 | #if defined(TARGET_PPC64) | |
3048 | TCGv_i32 t0; | |
3049 | TCGv t1; | |
3050 | t0 = tcg_temp_new_i32(); | |
3051 | tcg_gen_trunc_tl_i32(t0, arg1); | |
3052 | tcg_gen_bswap_i32(t0, t0); | |
3053 | t1 = tcg_temp_new(); | |
3054 | tcg_gen_extu_i32_tl(t1, t0); | |
3055 | tcg_temp_free_i32(t0); | |
3056 | tcg_gen_qemu_st32(t1, arg2, ctx->mem_idx); | |
3057 | tcg_temp_free(t1); | |
3058 | #else | |
3059 | TCGv t0 = tcg_temp_new(); | |
3060 | tcg_gen_bswap_i32(t0, arg1); | |
3061 | tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx); | |
3062 | tcg_temp_free(t0); | |
3063 | #endif | |
3064 | } else { | |
3065 | tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx); | |
3066 | } | |
3067 | } | |
3068 | GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); | |
3069 | ||
3070 | /*** Integer load and store multiple ***/ | |
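| /* lmw and stmw transfer up to 32 registers and can fault part way | |
|  * through, so they are implemented as helpers; nip is rewound to the | |
|  * instruction address first so the helper can raise a precise memory | |
|  * exception. | |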
3071 | /* lmw */ | |
3072 | GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
3073 | { | |
3074 | TCGv t0; | |
3075 | TCGv_i32 t1; | |
3076 | gen_set_access_type(ctx, ACCESS_INT); | |
3077 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
3078 | gen_update_nip(ctx, ctx->nip - 4); | |
3079 | t0 = tcg_temp_new(); | |
3080 | t1 = tcg_const_i32(rD(ctx->opcode)); | |
3081 | gen_addr_imm_index(ctx, t0, 0); | |
3082 | gen_helper_lmw(t0, t1); | |
3083 | tcg_temp_free(t0); | |
3084 | tcg_temp_free_i32(t1); | |
3085 | } | |
3086 | ||
3087 | /* stmw */ | |
3088 | GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) | |
3089 | { | |
3090 | TCGv t0; | |
3091 | TCGv_i32 t1; | |
3092 | gen_set_access_type(ctx, ACCESS_INT); | |
3093 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
3094 | gen_update_nip(ctx, ctx->nip - 4); | |
3095 | t0 = tcg_temp_new(); | |
3096 | t1 = tcg_const_i32(rS(ctx->opcode)); | |
3097 | gen_addr_imm_index(ctx, t0, 0); | |
3098 | gen_helper_stmw(t0, t1); | |
3099 | tcg_temp_free(t0); | |
3100 | tcg_temp_free_i32(t1); | |
3101 | } | |
3102 | ||
3103 | /*** Integer load and store strings ***/ | |
3104 | /* lswi */ | |
3105 | /* PowerPC32 specification says we must generate an exception if | |
3106 | * rA is in the range of registers to be loaded. | |
3107 |  * On the other hand, IBM says this is valid, but rA won't be loaded. | |
3108 | * For now, I'll follow the spec... | |
3109 | */ | |
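| /* Note that the register range may wrap from r31 back to r0: a | |
|  * 12-byte lswi starting at r30, for instance, fills r30, r31 and r0. | |
|  * The check below rejects rA anywhere in that (possibly wrapping) | |
|  * range, since rA holds the base address of the transfer. | |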
3110 | GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING) | |
3111 | { | |
3112 | TCGv t0; | |
3113 | TCGv_i32 t1, t2; | |
3114 | int nb = NB(ctx->opcode); | |
3115 | int start = rD(ctx->opcode); | |
3116 | int ra = rA(ctx->opcode); | |
3117 | int nr; | |
3118 | ||
3119 | if (nb == 0) | |
3120 | nb = 32; | |
3121 | nr = (nb + 3) / 4; | |
3122 | if (unlikely(((start + nr) > 32 && | |
3123 | (start <= ra || (start + nr - 32) > ra)) || | |
3124 | ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) { | |
3125 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); | |
3126 | return; | |
3127 | } | |
3128 | gen_set_access_type(ctx, ACCESS_INT); | |
3129 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
3130 | gen_update_nip(ctx, ctx->nip - 4); | |
3131 | t0 = tcg_temp_new(); | |
3132 | gen_addr_register(ctx, t0); | |
3133 | t1 = tcg_const_i32(nb); | |
3134 | t2 = tcg_const_i32(start); | |
3135 | gen_helper_lsw(t0, t1, t2); | |
3136 | tcg_temp_free(t0); | |
3137 | tcg_temp_free_i32(t1); | |
3138 | tcg_temp_free_i32(t2); | |
3139 | } | |
3140 | ||
3141 | /* lswx */ | |
3142 | GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING) | |
3143 | { | |
3144 | TCGv t0; | |
3145 | TCGv_i32 t1, t2, t3; | |
3146 | gen_set_access_type(ctx, ACCESS_INT); | |
3147 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
3148 | gen_update_nip(ctx, ctx->nip - 4); | |
3149 | t0 = tcg_temp_new(); | |
3150 | gen_addr_reg_index(ctx, t0); | |
3151 | t1 = tcg_const_i32(rD(ctx->opcode)); | |
3152 | t2 = tcg_const_i32(rA(ctx->opcode)); | |
3153 | t3 = tcg_const_i32(rB(ctx->opcode)); | |
3154 | gen_helper_lswx(t0, t1, t2, t3); | |
3155 | tcg_temp_free(t0); | |
3156 | tcg_temp_free_i32(t1); | |
3157 | tcg_temp_free_i32(t2); | |
3158 | tcg_temp_free_i32(t3); | |
3159 | } | |
3160 | ||
3161 | /* stswi */ | |
3162 | GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING) | |
3163 | { | |
3164 | TCGv t0; | |
3165 | TCGv_i32 t1, t2; | |
3166 | int nb = NB(ctx->opcode); | |
3167 | gen_set_access_type(ctx, ACCESS_INT); | |
3168 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
3169 | gen_update_nip(ctx, ctx->nip - 4); | |
3170 | t0 = tcg_temp_new(); | |
3171 | gen_addr_register(ctx, t0); | |
3172 | if (nb == 0) | |
3173 | nb = 32; | |
3174 | t1 = tcg_const_i32(nb); | |
3175 | t2 = tcg_const_i32(rS(ctx->opcode)); | |
3176 | gen_helper_stsw(t0, t1, t2); | |
3177 | tcg_temp_free(t0); | |
3178 | tcg_temp_free_i32(t1); | |
3179 | tcg_temp_free_i32(t2); | |
3180 | } | |
3181 | ||
3182 | /* stswx */ | |
3183 | GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING) | |
3184 | { | |
3185 | TCGv t0; | |
3186 | TCGv_i32 t1, t2; | |
3187 | gen_set_access_type(ctx, ACCESS_INT); | |
3188 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
3189 | gen_update_nip(ctx, ctx->nip - 4); | |
3190 | t0 = tcg_temp_new(); | |
3191 | gen_addr_reg_index(ctx, t0); | |
3192 | t1 = tcg_temp_new_i32(); | |
3193 | tcg_gen_trunc_tl_i32(t1, cpu_xer); | |
3194 | tcg_gen_andi_i32(t1, t1, 0x7F); | |
3195 | t2 = tcg_const_i32(rS(ctx->opcode)); | |
3196 | gen_helper_stsw(t0, t1, t2); | |
3197 | tcg_temp_free(t0); | |
3198 | tcg_temp_free_i32(t1); | |
3199 | tcg_temp_free_i32(t2); | |
3200 | } | |
3201 | ||
3202 | /*** Memory synchronisation ***/ | |
3203 | /* eieio */ | |
3204 | GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO) | |
3205 | { | |
3206 | } | |
3207 | ||
3208 | /* isync */ | |
3209 | GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM) | |
3210 | { | |
3211 | gen_stop_exception(ctx); | |
3212 | } | |
3213 | ||
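| /* Load-and-reserve / store-conditional: lwarx (and ldarx) record the | |
|  * effective address in cpu_reserve; stwcx./stdcx. perform the store | |
|  * only if that address still matches and always clear the reservation | |
|  * by setting cpu_reserve to -1.  CR0 is set to XER[SO] plus EQ when | |
|  * the store was performed.  Only the address is tracked, which is a | |
|  * simplification of the architectural reservation granule. | |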
3214 | /* lwarx */ | |
3215 | GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES) | |
3216 | { | |
3217 | TCGv t0; | |
3218 | gen_set_access_type(ctx, ACCESS_RES); | |
3219 | t0 = tcg_temp_local_new(); | |
3220 | gen_addr_reg_index(ctx, t0); | |
3221 | gen_check_align(ctx, t0, 0x03); | |
3222 | gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); | |
3223 | tcg_gen_mov_tl(cpu_reserve, t0); | |
3224 | tcg_temp_free(t0); | |
3225 | } | |
3226 | ||
3227 | /* stwcx. */ | |
3228 | GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES) | |
3229 | { | |
3230 | int l1; | |
3231 | TCGv t0; | |
3232 | gen_set_access_type(ctx, ACCESS_RES); | |
3233 | t0 = tcg_temp_local_new(); | |
3234 | gen_addr_reg_index(ctx, t0); | |
3235 | gen_check_align(ctx, t0, 0x03); | |
3236 | tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); | |
3237 | tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); | |
3238 | tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); | |
3239 | l1 = gen_new_label(); | |
3240 | tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); | |
3241 | tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); | |
3242 | gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0); | |
3243 | gen_set_label(l1); | |
3244 | tcg_gen_movi_tl(cpu_reserve, -1); | |
3245 | tcg_temp_free(t0); | |
3246 | } | |
3247 | ||
3248 | #if defined(TARGET_PPC64) | |
3249 | /* ldarx */ | |
3250 | GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B) | |
3251 | { | |
3252 | TCGv t0; | |
3253 | gen_set_access_type(ctx, ACCESS_RES); | |
3254 | t0 = tcg_temp_local_new(); | |
3255 | gen_addr_reg_index(ctx, t0); | |
3256 | gen_check_align(ctx, t0, 0x07); | |
3257 | gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], t0); | |
3258 | tcg_gen_mov_tl(cpu_reserve, t0); | |
3259 | tcg_temp_free(t0); | |
3260 | } | |
3261 | ||
3262 | /* stdcx. */ | |
3263 | GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B) | |
3264 | { | |
3265 | int l1; | |
3266 | TCGv t0; | |
3267 | gen_set_access_type(ctx, ACCESS_RES); | |
3268 | t0 = tcg_temp_local_new(); | |
3269 | gen_addr_reg_index(ctx, t0); | |
3270 | gen_check_align(ctx, t0, 0x07); | |
3271 | tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); | |
3272 | tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); | |
3273 | tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); | |
3274 | l1 = gen_new_label(); | |
3275 | tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); | |
3276 | tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); | |
3277 | gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0); | |
3278 | gen_set_label(l1); | |
3279 | tcg_gen_movi_tl(cpu_reserve, -1); | |
3280 | tcg_temp_free(t0); | |
3281 | } | |
3282 | #endif /* defined(TARGET_PPC64) */ | |
3283 | ||
3284 | /* sync */ | |
3285 | GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC) | |
3286 | { | |
3287 | } | |
3288 | ||
3289 | /* wait */ | |
3290 | GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT) | |
3291 | { | |
3292 | TCGv_i32 t0 = tcg_const_i32(1); | |
3293 | tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted)); | |
3294 | tcg_temp_free_i32(t0); | |
3295 | /* Stop translation, as the CPU is supposed to sleep from now on */ | |
3296 | gen_exception_err(ctx, EXCP_HLT, 1); | |
3297 | } | |
3298 | ||
3299 | /*** Floating-point load ***/ | |
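| /* cpu_fpr[] always holds 64-bit (double precision) values, so the | |
|  * single precision forms (lfs and friends) load the 32-bit pattern | |
|  * and widen it through the float32_to_float64 helper; the stores | |
|  * below do the opposite conversion. | |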
3300 | #define GEN_LDF(name, ldop, opc, type) \ | |
3301 | GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
3302 | { \ | |
3303 | TCGv EA; \ | |
3304 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3305 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3306 | return; \ | |
3307 | } \ | |
3308 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3309 | EA = tcg_temp_new(); \ | |
3310 | gen_addr_imm_index(ctx, EA, 0); \ | |
3311 | gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ | |
3312 | tcg_temp_free(EA); \ | |
3313 | } | |
3314 | ||
3315 | #define GEN_LDUF(name, ldop, opc, type) \ | |
3316 | GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
3317 | { \ | |
3318 | TCGv EA; \ | |
3319 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3320 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3321 | return; \ | |
3322 | } \ | |
3323 | if (unlikely(rA(ctx->opcode) == 0)) { \ | |
3324 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
3325 | return; \ | |
3326 | } \ | |
3327 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3328 | EA = tcg_temp_new(); \ | |
3329 | gen_addr_imm_index(ctx, EA, 0); \ | |
3330 | gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ | |
3331 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
3332 | tcg_temp_free(EA); \ | |
3333 | } | |
3334 | ||
3335 | #define GEN_LDUXF(name, ldop, opc, type) \ | |
3336 | GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \ | |
3337 | { \ | |
3338 | TCGv EA; \ | |
3339 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3340 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3341 | return; \ | |
3342 | } \ | |
3343 | if (unlikely(rA(ctx->opcode) == 0)) { \ | |
3344 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
3345 | return; \ | |
3346 | } \ | |
3347 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3348 | EA = tcg_temp_new(); \ | |
3349 | gen_addr_reg_index(ctx, EA); \ | |
3350 | gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ | |
3351 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
3352 | tcg_temp_free(EA); \ | |
3353 | } | |
3354 | ||
3355 | #define GEN_LDXF(name, ldop, opc2, opc3, type) \ | |
3356 | GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ | |
3357 | { \ | |
3358 | TCGv EA; \ | |
3359 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3360 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3361 | return; \ | |
3362 | } \ | |
3363 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3364 | EA = tcg_temp_new(); \ | |
3365 | gen_addr_reg_index(ctx, EA); \ | |
3366 | gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ | |
3367 | tcg_temp_free(EA); \ | |
3368 | } | |
3369 | ||
3370 | #define GEN_LDFS(name, ldop, op, type) \ | |
3371 | GEN_LDF(name, ldop, op | 0x20, type); \ | |
3372 | GEN_LDUF(name, ldop, op | 0x21, type); \ | |
3373 | GEN_LDUXF(name, ldop, op | 0x01, type); \ | |
3374 | GEN_LDXF(name, ldop, 0x17, op | 0x00, type) | |
3375 | ||
3376 | static always_inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) | |
3377 | { | |
3378 | TCGv t0 = tcg_temp_new(); | |
3379 | TCGv_i32 t1 = tcg_temp_new_i32(); | |
3380 | gen_qemu_ld32u(ctx, t0, arg2); | |
3381 | tcg_gen_trunc_tl_i32(t1, t0); | |
3382 | tcg_temp_free(t0); | |
3383 | gen_helper_float32_to_float64(arg1, t1); | |
3384 | tcg_temp_free_i32(t1); | |
3385 | } | |
3386 | ||
3387 | /* lfd lfdu lfdux lfdx */ | |
3388 | GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT); | |
3389 | /* lfs lfsu lfsux lfsx */ | |
3390 | GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT); | |
3391 | ||
3392 | /*** Floating-point store ***/ | |
3393 | #define GEN_STF(name, stop, opc, type) \ | |
3394 | GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
3395 | { \ | |
3396 | TCGv EA; \ | |
3397 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3398 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3399 | return; \ | |
3400 | } \ | |
3401 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3402 | EA = tcg_temp_new(); \ | |
3403 | gen_addr_imm_index(ctx, EA, 0); \ | |
3404 | gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ | |
3405 | tcg_temp_free(EA); \ | |
3406 | } | |
3407 | ||
3408 | #define GEN_STUF(name, stop, opc, type) \ | |
3409 | GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \ | |
3410 | { \ | |
3411 | TCGv EA; \ | |
3412 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3413 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3414 | return; \ | |
3415 | } \ | |
3416 | if (unlikely(rA(ctx->opcode) == 0)) { \ | |
3417 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
3418 | return; \ | |
3419 | } \ | |
3420 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3421 | EA = tcg_temp_new(); \ | |
3422 | gen_addr_imm_index(ctx, EA, 0); \ | |
3423 | gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ | |
3424 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
3425 | tcg_temp_free(EA); \ | |
3426 | } | |
3427 | ||
3428 | #define GEN_STUXF(name, stop, opc, type) \ | |
3429 | GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \ | |
3430 | { \ | |
3431 | TCGv EA; \ | |
3432 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3433 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3434 | return; \ | |
3435 | } \ | |
3436 | if (unlikely(rA(ctx->opcode) == 0)) { \ | |
3437 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ | |
3438 | return; \ | |
3439 | } \ | |
3440 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3441 | EA = tcg_temp_new(); \ | |
3442 | gen_addr_reg_index(ctx, EA); \ | |
3443 | gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ | |
3444 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ | |
3445 | tcg_temp_free(EA); \ | |
3446 | } | |
3447 | ||
3448 | #define GEN_STXF(name, stop, opc2, opc3, type) \ | |
3449 | GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ | |
3450 | { \ | |
3451 | TCGv EA; \ | |
3452 | if (unlikely(!ctx->fpu_enabled)) { \ | |
3453 | gen_exception(ctx, POWERPC_EXCP_FPU); \ | |
3454 | return; \ | |
3455 | } \ | |
3456 | gen_set_access_type(ctx, ACCESS_FLOAT); \ | |
3457 | EA = tcg_temp_new(); \ | |
3458 | gen_addr_reg_index(ctx, EA); \ | |
3459 | gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ | |
3460 | tcg_temp_free(EA); \ | |
3461 | } | |
3462 | ||
3463 | #define GEN_STFS(name, stop, op, type) \ | |
3464 | GEN_STF(name, stop, op | 0x20, type); \ | |
3465 | GEN_STUF(name, stop, op | 0x21, type); \ | |
3466 | GEN_STUXF(name, stop, op | 0x01, type); \ | |
3467 | GEN_STXF(name, stop, 0x17, op | 0x00, type) | |
3468 | ||
3469 | static always_inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) | |
3470 | { | |
3471 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
3472 | TCGv t1 = tcg_temp_new(); | |
3473 | gen_helper_float64_to_float32(t0, arg1); | |
3474 | tcg_gen_extu_i32_tl(t1, t0); | |
3475 | tcg_temp_free_i32(t0); | |
3476 | gen_qemu_st32(ctx, t1, arg2); | |
3477 | tcg_temp_free(t1); | |
3478 | } | |
3479 | ||
3480 | /* stfd stfdu stfdux stfdx */ | |
3481 | GEN_STFS(stfd, st64, 0x16, PPC_FLOAT); | |
3482 | /* stfs stfsu stfsux stfsx */ | |
3483 | GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT); | |
3484 | ||
3485 | /* Optional: */ | |
3486 | static always_inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) | |
3487 | { | |
3488 | TCGv t0 = tcg_temp_new(); | |
3489 | tcg_gen_trunc_i64_tl(t0, arg1); | |
3490 | gen_qemu_st32(ctx, t0, arg2); | |
3491 | tcg_temp_free(t0); | |
3492 | } | |
3493 | /* stfiwx */ | |
3494 | GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX); | |
3495 | ||
3496 | /*** Branch ***/ | |
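| /* gen_goto_tb chains translation blocks: when the destination lies on | |
|  * the same guest page as the current TB and single-stepping is off, | |
|  * it emits goto_tb/exit_tb(tb + n) so the blocks can be linked | |
|  * directly; otherwise it just sets nip and exits, honouring the | |
|  * branch-trace and gdbstub single-step exceptions. | |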
3497 | static always_inline void gen_goto_tb (DisasContext *ctx, int n, | |
3498 | target_ulong dest) | |
3499 | { | |
3500 | TranslationBlock *tb; | |
3501 | tb = ctx->tb; | |
3502 | #if defined(TARGET_PPC64) | |
3503 | if (!ctx->sf_mode) | |
3504 | dest = (uint32_t) dest; | |
3505 | #endif | |
3506 | if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) && | |
3507 | likely(!ctx->singlestep_enabled)) { | |
3508 | tcg_gen_goto_tb(n); | |
3509 | tcg_gen_movi_tl(cpu_nip, dest & ~3); | |
3510 | tcg_gen_exit_tb((long)tb + n); | |
3511 | } else { | |
3512 | tcg_gen_movi_tl(cpu_nip, dest & ~3); | |
3513 | if (unlikely(ctx->singlestep_enabled)) { | |
3514 | if ((ctx->singlestep_enabled & | |
3515 | (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) && | |
3516 | ctx->exception == POWERPC_EXCP_BRANCH) { | |
3517 | target_ulong tmp = ctx->nip; | |
3518 | ctx->nip = dest; | |
3519 | gen_exception(ctx, POWERPC_EXCP_TRACE); | |
3520 | ctx->nip = tmp; | |
3521 | } | |
3522 | if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) { | |
3523 | gen_debug_exception(ctx); | |
3524 | } | |
3525 | } | |
3526 | tcg_gen_exit_tb(0); | |
3527 | } | |
3528 | } | |
3529 | ||
3530 | static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip) | |
3531 | { | |
3532 | #if defined(TARGET_PPC64) | |
3533 | if (ctx->sf_mode == 0) | |
3534 | tcg_gen_movi_tl(cpu_lr, (uint32_t)nip); | |
3535 | else | |
3536 | #endif | |
3537 | tcg_gen_movi_tl(cpu_lr, nip); | |
3538 | } | |
3539 | ||
3540 | /* b ba bl bla */ | |
3541 | GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW) | |
3542 | { | |
3543 | target_ulong li, target; | |
3544 | ||
3545 | ctx->exception = POWERPC_EXCP_BRANCH; | |
3546 | /* sign extend LI */ | |
3547 | #if defined(TARGET_PPC64) | |
3548 | if (ctx->sf_mode) | |
3549 | li = ((int64_t)LI(ctx->opcode) << 38) >> 38; | |
3550 | else | |
3551 | #endif | |
3552 | li = ((int32_t)LI(ctx->opcode) << 6) >> 6; | |
3553 | if (likely(AA(ctx->opcode) == 0)) | |
3554 | target = ctx->nip + li - 4; | |
3555 | else | |
3556 | target = li; | |
3557 | if (LK(ctx->opcode)) | |
3558 | gen_setlr(ctx, ctx->nip); | |
3559 | gen_goto_tb(ctx, 0, target); | |
3560 | } | |
3561 | ||
3562 | #define BCOND_IM 0 | |
3563 | #define BCOND_LR 1 | |
3564 | #define BCOND_CTR 2 | |
3565 | ||
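| /* gen_bcond decodes the BO field: if bit 0x10 is clear the CR bit | |
|  * selected by BI is tested and 0x8 chooses branch-on-set vs. | |
|  * branch-on-clear; if bit 0x4 is clear CTR is decremented and 0x2 | |
|  * chooses branch on CTR == 0 vs. CTR != 0 (invalid for bcctr). | |
|  * Label l1 collects all the "branch not taken" paths. | |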
3566 | static always_inline void gen_bcond (DisasContext *ctx, int type) | |
3567 | { | |
3568 | uint32_t bo = BO(ctx->opcode); | |
3569 | int l1; | |
3570 | TCGv target; | |
3571 | ||
3572 | ctx->exception = POWERPC_EXCP_BRANCH; | |
3573 | if (type == BCOND_LR || type == BCOND_CTR) { | |
3574 | target = tcg_temp_local_new(); | |
3575 | if (type == BCOND_CTR) | |
3576 | tcg_gen_mov_tl(target, cpu_ctr); | |
3577 | else | |
3578 | tcg_gen_mov_tl(target, cpu_lr); | |
3579 | } | |
3580 | if (LK(ctx->opcode)) | |
3581 | gen_setlr(ctx, ctx->nip); | |
3582 | l1 = gen_new_label(); | |
3583 | if ((bo & 0x4) == 0) { | |
3584 | /* Decrement and test CTR */ | |
3585 | TCGv temp = tcg_temp_new(); | |
3586 | if (unlikely(type == BCOND_CTR)) { | |
3587 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
3588 | return; | |
3589 | } | |
3590 | tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); | |
3591 | #if defined(TARGET_PPC64) | |
3592 | if (!ctx->sf_mode) | |
3593 | tcg_gen_ext32u_tl(temp, cpu_ctr); | |
3594 | else | |
3595 | #endif | |
3596 | tcg_gen_mov_tl(temp, cpu_ctr); | |
3597 | if (bo & 0x2) { | |
3598 | tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); | |
3599 | } else { | |
3600 | tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); | |
3601 | } | |
3602 | tcg_temp_free(temp); | |
3603 | } | |
3604 | if ((bo & 0x10) == 0) { | |
3605 | /* Test CR */ | |
3606 | uint32_t bi = BI(ctx->opcode); | |
3607 | uint32_t mask = 1 << (3 - (bi & 0x03)); | |
3608 | TCGv_i32 temp = tcg_temp_new_i32(); | |
3609 | ||
3610 | if (bo & 0x8) { | |
3611 | tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); | |
3612 | tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); | |
3613 | } else { | |
3614 | tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); | |
3615 | tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); | |
3616 | } | |
3617 | tcg_temp_free_i32(temp); | |
3618 | } | |
3619 | if (type == BCOND_IM) { | |
3620 | target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); | |
3621 | if (likely(AA(ctx->opcode) == 0)) { | |
3622 | gen_goto_tb(ctx, 0, ctx->nip + li - 4); | |
3623 | } else { | |
3624 | gen_goto_tb(ctx, 0, li); | |
3625 | } | |
3626 | gen_set_label(l1); | |
3627 | gen_goto_tb(ctx, 1, ctx->nip); | |
3628 | } else { | |
3629 | #if defined(TARGET_PPC64) | |
3630 | if (!(ctx->sf_mode)) | |
3631 | tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); | |
3632 | else | |
3633 | #endif | |
3634 | tcg_gen_andi_tl(cpu_nip, target, ~3); | |
3635 | tcg_gen_exit_tb(0); | |
3636 | gen_set_label(l1); | |
3637 | #if defined(TARGET_PPC64) | |
3638 | if (!(ctx->sf_mode)) | |
3639 | tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip); | |
3640 | else | |
3641 | #endif | |
3642 | tcg_gen_movi_tl(cpu_nip, ctx->nip); | |
3643 | tcg_gen_exit_tb(0); | |
3644 | } | |
3645 | } | |
3646 | ||
3647 | GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW) | |
3648 | { | |
3649 | gen_bcond(ctx, BCOND_IM); | |
3650 | } | |
3651 | ||
3652 | GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW) | |
3653 | { | |
3654 | gen_bcond(ctx, BCOND_CTR); | |
3655 | } | |
3656 | ||
3657 | GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW) | |
3658 | { | |
3659 | gen_bcond(ctx, BCOND_LR); | |
3660 | } | |
3661 | ||
3662 | /*** Condition register logical ***/ | |
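| /* The CR is kept as eight 4-bit values in cpu_crf[]: crbX >> 2 | |
|  * selects the field and 3 - (crbX & 3) is the bit position inside it, | |
|  * so shifting by (crbD & 3) - (crbA & 3) lines the source bit up with | |
|  * the destination bit before the boolean operation; the result is | |
|  * then merged into the destination field under "bitmask". | |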
3663 | #define GEN_CRLOGIC(name, tcg_op, opc) \ | |
3664 | GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \ | |
3665 | { \ | |
3666 | uint8_t bitmask; \ | |
3667 | int sh; \ | |
3668 | TCGv_i32 t0, t1; \ | |
3669 | sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ | |
3670 | t0 = tcg_temp_new_i32(); \ | |
3671 | if (sh > 0) \ | |
3672 | tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ | |
3673 | else if (sh < 0) \ | |
3674 | tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ | |
3675 | else \ | |
3676 | tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ | |
3677 | t1 = tcg_temp_new_i32(); \ | |
3678 | sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ | |
3679 | if (sh > 0) \ | |
3680 | tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ | |
3681 | else if (sh < 0) \ | |
3682 | tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ | |
3683 | else \ | |
3684 | tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ | |
3685 | tcg_op(t0, t0, t1); \ | |
3686 | bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \ | |
3687 | tcg_gen_andi_i32(t0, t0, bitmask); \ | |
3688 | tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ | |
3689 | tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ | |
3690 | tcg_temp_free_i32(t0); \ | |
3691 | tcg_temp_free_i32(t1); \ | |
3692 | } | |
3693 | ||
3694 | /* crand */ | |
3695 | GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); | |
3696 | /* crandc */ | |
3697 | GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); | |
3698 | /* creqv */ | |
3699 | GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); | |
3700 | /* crnand */ | |
3701 | GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); | |
3702 | /* crnor */ | |
3703 | GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); | |
3704 | /* cror */ | |
3705 | GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); | |
3706 | /* crorc */ | |
3707 | GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); | |
3708 | /* crxor */ | |
3709 | GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); | |
3710 | /* mcrf */ | |
3711 | GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER) | |
3712 | { | |
3713 | tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); | |
3714 | } | |
3715 | ||
3716 | /*** System linkage ***/ | |
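| /* rfi/rfid/hrfid restore the MSR and the return address from the | |
|  * save/restore registers in a helper.  mem_idx encodes the current | |
|  * privilege level, so a zero value (problem state) makes these | |
|  * privileged instructions fault; translation stops afterwards since | |
|  * the MSR, and with it the translation flags, may have changed. | |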
3717 | /* rfi (mem_idx only) */ | |
3718 | GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW) | |
3719 | { | |
3720 | #if defined(CONFIG_USER_ONLY) | |
3721 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
3722 | #else | |
3723 | /* Restore CPU state */ | |
3724 | if (unlikely(!ctx->mem_idx)) { | |
3725 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
3726 | return; | |
3727 | } | |
3728 | gen_helper_rfi(); | |
3729 | gen_sync_exception(ctx); | |
3730 | #endif | |
3731 | } | |
3732 | ||
3733 | #if defined(TARGET_PPC64) | |
3734 | GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B) | |
3735 | { | |
3736 | #if defined(CONFIG_USER_ONLY) | |
3737 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
3738 | #else | |
3739 | /* Restore CPU state */ | |
3740 | if (unlikely(!ctx->mem_idx)) { | |
3741 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
3742 | return; | |
3743 | } | |
3744 | gen_helper_rfid(); | |
3745 | gen_sync_exception(ctx); | |
3746 | #endif | |
3747 | } | |
3748 | ||
3749 | GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H) | |
3750 | { | |
3751 | #if defined(CONFIG_USER_ONLY) | |
3752 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
3753 | #else | |
3754 | /* Restore CPU state */ | |
3755 | if (unlikely(ctx->mem_idx <= 1)) { | |
3756 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
3757 | return; | |
3758 | } | |
3759 | gen_helper_hrfid(); | |
3760 | gen_sync_exception(ctx); | |
3761 | #endif | |
3762 | } | |
3763 | #endif | |
3764 | ||
3765 | /* sc */ | |
3766 | #if defined(CONFIG_USER_ONLY) | |
3767 | #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER | |
3768 | #else | |
3769 | #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL | |
3770 | #endif | |
3771 | GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW) | |
3772 | { | |
3773 | uint32_t lev; | |
3774 | ||
3775 | lev = (ctx->opcode >> 5) & 0x7F; | |
3776 | gen_exception_err(ctx, POWERPC_SYSCALL, lev); | |
3777 | } | |
3778 | ||
3779 | /*** Trap ***/ | |
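| /* The TO field selects which comparisons (signed or unsigned less | |
|  * than, greater than, equal) between rA and rB or SIMM cause a trap; | |
|  * the comparison and the exception are done in the tw/td helpers, | |
|  * hence the nip update beforehand. | |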
3780 | /* tw */ | |
3781 | GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW) | |
3782 | { | |
3783 | TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode)); | |
3784 | /* Update the nip since this might generate a trap exception */ | |
3785 | gen_update_nip(ctx, ctx->nip); | |
3786 | gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); | |
3787 | tcg_temp_free_i32(t0); | |
3788 | } | |
3789 | ||
3790 | /* twi */ | |
3791 | GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW) | |
3792 | { | |
3793 | TCGv t0 = tcg_const_tl(SIMM(ctx->opcode)); | |
3794 | TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode)); | |
3795 | /* Update the nip since this might generate a trap exception */ | |
3796 | gen_update_nip(ctx, ctx->nip); | |
3797 | gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
3798 | tcg_temp_free(t0); | |
3799 | tcg_temp_free_i32(t1); | |
3800 | } | |
3801 | ||
3802 | #if defined(TARGET_PPC64) | |
3803 | /* td */ | |
3804 | GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B) | |
3805 | { | |
3806 | TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode)); | |
3807 | /* Update the nip since this might generate a trap exception */ | |
3808 | gen_update_nip(ctx, ctx->nip); | |
3809 | gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); | |
3810 | tcg_temp_free_i32(t0); | |
3811 | } | |
3812 | ||
3813 | /* tdi */ | |
3814 | GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B) | |
3815 | { | |
3816 | TCGv t0 = tcg_const_tl(SIMM(ctx->opcode)); | |
3817 | TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode)); | |
3818 | /* Update the nip since this might generate a trap exception */ | |
3819 | gen_update_nip(ctx, ctx->nip); | |
3820 | gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
3821 | tcg_temp_free(t0); | |
3822 | tcg_temp_free_i32(t1); | |
3823 | } | |
3824 | #endif | |
3825 | ||
3826 | /*** Processor control ***/ | |
3827 | /* mcrxr */ | |
3828 | GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC) | |
3829 | { | |
3830 | tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer); | |
3831 | tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA); | |
3832 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA)); | |
3833 | } | |
3834 | ||
3835 | /* mfcr */ | |
3836 | GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC) | |
3837 | { | |
3838 | uint32_t crm, crn; | |
3839 | ||
3840 | if (likely(ctx->opcode & 0x00100000)) { | |
3841 | crm = CRM(ctx->opcode); | |
3842 | if (likely(crm != 0 && (crm & (crm - 1)) == 0)) { | |
3843 | crn = ffs(crm) - 1; | |
3844 | tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); | |
| tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], crn * 4); | |
3845 | } | |
3846 | } else { | |
3847 | gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]); | |
3848 | } | |
3849 | } | |
3850 | ||
3851 | /* mfmsr */ | |
3852 | GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC) | |
3853 | { | |
3854 | #if defined(CONFIG_USER_ONLY) | |
3855 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3856 | #else | |
3857 | if (unlikely(!ctx->mem_idx)) { | |
3858 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3859 | return; | |
3860 | } | |
3861 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); | |
3862 | #endif | |
3863 | } | |
3864 | ||
3865 | #if 1 | |
3866 | #define SPR_NOACCESS ((void *)(-1UL)) | |
3867 | #else | |
3868 | static void spr_noaccess (void *opaque, int sprn) | |
3869 | { | |
3870 | sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); | |
3871 | printf("ERROR: try to access SPR %d !\n", sprn); | |
3872 | } | |
3873 | #define SPR_NOACCESS (&spr_noaccess) | |
3874 | #endif | |
3875 | ||
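| /* SPR accesses go through per-SPR callback tables filled in at CPU | |
|  * initialisation: uea_* for user, oea_* for supervisor and hea_* for | |
|  * hypervisor accesses, selected by mem_idx.  A NULL entry means the | |
|  * SPR does not exist, SPR_NOACCESS marks a privilege violation. | |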
3876 | /* mfspr */ | |
3877 | static always_inline void gen_op_mfspr (DisasContext *ctx) | |
3878 | { | |
3879 | void (*read_cb)(void *opaque, int gprn, int sprn); | |
3880 | uint32_t sprn = SPR(ctx->opcode); | |
3881 | ||
3882 | #if !defined(CONFIG_USER_ONLY) | |
3883 | if (ctx->mem_idx == 2) | |
3884 | read_cb = ctx->spr_cb[sprn].hea_read; | |
3885 | else if (ctx->mem_idx) | |
3886 | read_cb = ctx->spr_cb[sprn].oea_read; | |
3887 | else | |
3888 | #endif | |
3889 | read_cb = ctx->spr_cb[sprn].uea_read; | |
3890 | if (likely(read_cb != NULL)) { | |
3891 | if (likely(read_cb != SPR_NOACCESS)) { | |
3892 | (*read_cb)(ctx, rD(ctx->opcode), sprn); | |
3893 | } else { | |
3894 | /* Privilege exception */ | |
3895 | /* This is a hack to avoid warnings when running Linux: | |
3896 | * this OS breaks the PowerPC virtualisation model, | |
3897 |  * allowing userland applications to read the PVR | |
3898 | */ | |
3899 | if (sprn != SPR_PVR) { | |
3900 | qemu_log("Trying to read privileged spr %d %03x at " | |
3901 | ADDRX "\n", sprn, sprn, ctx->nip); | |
3902 | printf("Trying to read privileged spr %d %03x at " ADDRX "\n", | |
3903 | sprn, sprn, ctx->nip); | |
3904 | } | |
3905 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3906 | } | |
3907 | } else { | |
3908 | /* Not defined */ | |
3909 | qemu_log("Trying to read invalid spr %d %03x at " | |
3910 | ADDRX "\n", sprn, sprn, ctx->nip); | |
3911 | printf("Trying to read invalid spr %d %03x at " ADDRX "\n", | |
3912 | sprn, sprn, ctx->nip); | |
3913 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR); | |
3914 | } | |
3915 | } | |
3916 | ||
3917 | GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC) | |
3918 | { | |
3919 | gen_op_mfspr(ctx); | |
3920 | } | |
3921 | ||
3922 | /* mftb */ | |
3923 | GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB) | |
3924 | { | |
3925 | gen_op_mfspr(ctx); | |
3926 | } | |
3927 | ||
3928 | /* mtcrf */ | |
3929 | GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC) | |
3930 | { | |
3931 | uint32_t crm, crn; | |
3932 | ||
3933 | crm = CRM(ctx->opcode); | |
3934 | if (likely(crm != 0 && (crm & (crm - 1)) == 0)) { | |
3935 | TCGv_i32 temp = tcg_temp_new_i32(); | |
3936 | crn = ffs(crm) - 1; | |
3937 | tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); | |
3938 | tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); | |
3939 | tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); | |
3940 | tcg_temp_free_i32(temp); | |
3941 | } else { | |
3942 | TCGv_i32 temp = tcg_const_i32(crm); | |
3943 | gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp); | |
3944 | tcg_temp_free_i32(temp); | |
3945 | } | |
3946 | } | |
3947 | ||
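| /* Opcode bit 0x00010000 is the L field of mtmsr/mtmsrd: when it is | |
|  * set only MSR[EE] and MSR[RI] are updated and no synchronisation is | |
|  * required; otherwise the whole MSR is stored through the helper and | |
|  * translation stops, as the translation flags may have changed. | |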
3948 | /* mtmsr */ | |
3949 | #if defined(TARGET_PPC64) | |
3950 | GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B) | |
3951 | { | |
3952 | #if defined(CONFIG_USER_ONLY) | |
3953 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3954 | #else | |
3955 | if (unlikely(!ctx->mem_idx)) { | |
3956 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3957 | return; | |
3958 | } | |
3959 | if (ctx->opcode & 0x00010000) { | |
3960 | /* Special form that does not need any synchronisation */ | |
3961 | TCGv t0 = tcg_temp_new(); | |
3962 | tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); | |
3963 | tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE))); | |
3964 | tcg_gen_or_tl(cpu_msr, cpu_msr, t0); | |
3965 | tcg_temp_free(t0); | |
3966 | } else { | |
3967 | /* XXX: we need to update nip before the store: | |
3968 | * if we enter power saving mode, we will exit the loop | |
3969 | * directly from ppc_store_msr | |
3970 | */ | |
3971 | gen_update_nip(ctx, ctx->nip); | |
3972 | gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]); | |
3973 | /* Must stop the translation as the machine state may have changed */ | |
3974 | /* Note that mtmsr is not always defined as context-synchronizing */ | |
3975 | gen_stop_exception(ctx); | |
3976 | } | |
3977 | #endif | |
3978 | } | |
3979 | #endif | |
3980 | ||
3981 | GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC) | |
3982 | { | |
3983 | #if defined(CONFIG_USER_ONLY) | |
3984 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3985 | #else | |
3986 | if (unlikely(!ctx->mem_idx)) { | |
3987 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
3988 | return; | |
3989 | } | |
3990 | if (ctx->opcode & 0x00010000) { | |
3991 | /* Special form that does not need any synchronisation */ | |
3992 | TCGv t0 = tcg_temp_new(); | |
3993 | tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); | |
3994 | tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE))); | |
3995 | tcg_gen_or_tl(cpu_msr, cpu_msr, t0); | |
3996 | tcg_temp_free(t0); | |
3997 | } else { | |
3998 | /* XXX: we need to update nip before the store: | |
3999 | * if we enter power saving mode, we will exit the loop | |
4000 | * directly from ppc_store_msr | |
4001 | */ | |
4002 | gen_update_nip(ctx, ctx->nip); | |
4003 | #if defined(TARGET_PPC64) | |
4004 | if (!ctx->sf_mode) { | |
4005 | TCGv t0 = tcg_temp_new(); | |
4006 | TCGv t1 = tcg_temp_new(); | |
4007 | tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL); | |
4008 | tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]); | |
4009 | tcg_gen_or_tl(t0, t0, t1); | |
4010 | tcg_temp_free(t1); | |
4011 | gen_helper_store_msr(t0); | |
4012 | tcg_temp_free(t0); | |
4013 | } else | |
4014 | #endif | |
4015 | gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]); | |
4016 | /* Must stop the translation as the machine state may have changed */ | |
4017 | /* Note that mtmsr is not always defined as context-synchronizing */ | |
4018 | gen_stop_exception(ctx); | |
4019 | } | |
4020 | #endif | |
4021 | } | |
4022 | ||
4023 | /* mtspr */ | |
4024 | GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC) | |
4025 | { | |
4026 | void (*write_cb)(void *opaque, int sprn, int gprn); | |
4027 | uint32_t sprn = SPR(ctx->opcode); | |
4028 | ||
4029 | #if !defined(CONFIG_USER_ONLY) | |
4030 | if (ctx->mem_idx == 2) | |
4031 | write_cb = ctx->spr_cb[sprn].hea_write; | |
4032 | else if (ctx->mem_idx) | |
4033 | write_cb = ctx->spr_cb[sprn].oea_write; | |
4034 | else | |
4035 | #endif | |
4036 | write_cb = ctx->spr_cb[sprn].uea_write; | |
4037 | if (likely(write_cb != NULL)) { | |
4038 | if (likely(write_cb != SPR_NOACCESS)) { | |
4039 | (*write_cb)(ctx, sprn, rS(ctx->opcode)); | |
4040 | } else { | |
4041 | /* Privilege exception */ | |
4042 | qemu_log("Trying to write privileged spr %d %03x at " | |
4043 | ADDRX "\n", sprn, sprn, ctx->nip); | |
4044 | printf("Trying to write privileged spr %d %03x at " ADDRX "\n", | |
4045 | sprn, sprn, ctx->nip); | |
4046 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4047 | } | |
4048 | } else { | |
4049 | /* Not defined */ | |
4050 | qemu_log("Trying to write invalid spr %d %03x at " | |
4051 | ADDRX "\n", sprn, sprn, ctx->nip); | |
4052 | printf("Trying to write invalid spr %d %03x at " ADDRX "\n", | |
4053 | sprn, sprn, ctx->nip); | |
4054 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR); | |
4055 | } | |
4056 | } | |
4057 | ||
4058 | /*** Cache management ***/ | |
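| /* The data cache is not modelled, so dcbf/dcbst only need to behave | |
|  * like a load for MMU purposes and dcbt/dcbtst are plain no-ops, | |
|  * while dcbz and icbi go through helpers because they really modify | |
|  * memory, respectively invalidate translated code. | |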
4059 | /* dcbf */ | |
4060 | GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE) | |
4061 | { | |
4062 | /* XXX: specification says this is treated as a load by the MMU */ | |
4063 | TCGv t0; | |
4064 | gen_set_access_type(ctx, ACCESS_CACHE); | |
4065 | t0 = tcg_temp_new(); | |
4066 | gen_addr_reg_index(ctx, t0); | |
4067 | gen_qemu_ld8u(ctx, t0, t0); | |
4068 | tcg_temp_free(t0); | |
4069 | } | |
4070 | ||
4071 | /* dcbi (Supervisor only) */ | |
4072 | GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE) | |
4073 | { | |
4074 | #if defined(CONFIG_USER_ONLY) | |
4075 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4076 | #else | |
4077 | TCGv EA, val; | |
4078 | if (unlikely(!ctx->mem_idx)) { | |
4079 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4080 | return; | |
4081 | } | |
4082 | EA = tcg_temp_new(); | |
4083 | gen_set_access_type(ctx, ACCESS_CACHE); | |
4084 | gen_addr_reg_index(ctx, EA); | |
4085 | val = tcg_temp_new(); | |
4086 | /* XXX: specification says this should be treated as a store by the MMU */ | |
4087 | gen_qemu_ld8u(ctx, val, EA); | |
4088 | gen_qemu_st8(ctx, val, EA); | |
4089 | tcg_temp_free(val); | |
4090 | tcg_temp_free(EA); | |
4091 | #endif | |
4092 | } | |
4093 | ||
4094 | /* dcbst */ | |
4095 | GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE) | |
4096 | { | |
4097 | /* XXX: specification says this is treated as a load by the MMU */ | |
4098 | TCGv t0; | |
4099 | gen_set_access_type(ctx, ACCESS_CACHE); | |
4100 | t0 = tcg_temp_new(); | |
4101 | gen_addr_reg_index(ctx, t0); | |
4102 | gen_qemu_ld8u(ctx, t0, t0); | |
4103 | tcg_temp_free(t0); | |
4104 | } | |
4105 | ||
4106 | /* dcbt */ | |
4107 | GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE) | |
4108 | { | |
4109 | /* interpreted as no-op */ | |
4110 | /* XXX: specification says this is treated as a load by the MMU | |
4111 | * but does not generate any exception | |
4112 | */ | |
4113 | } | |
4114 | ||
4115 | /* dcbtst */ | |
4116 | GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE) | |
4117 | { | |
4118 | /* interpreted as no-op */ | |
4119 | /* XXX: specification says this is treated as a load by the MMU | |
4120 | * but does not generate any exception | |
4121 | */ | |
4122 | } | |
4123 | ||
4124 | /* dcbz */ | |
4125 | GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ) | |
4126 | { | |
4127 | TCGv t0; | |
4128 | gen_set_access_type(ctx, ACCESS_CACHE); | |
4129 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
4130 | gen_update_nip(ctx, ctx->nip - 4); | |
4131 | t0 = tcg_temp_new(); | |
4132 | gen_addr_reg_index(ctx, t0); | |
4133 | gen_helper_dcbz(t0); | |
4134 | tcg_temp_free(t0); | |
4135 | } | |
4136 | ||
4137 | GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT) | |
4138 | { | |
4139 | TCGv t0; | |
4140 | gen_set_access_type(ctx, ACCESS_CACHE); | |
4141 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
4142 | gen_update_nip(ctx, ctx->nip - 4); | |
4143 | t0 = tcg_temp_new(); | |
4144 | gen_addr_reg_index(ctx, t0); | |
4145 | if (ctx->opcode & 0x00200000) | |
4146 | gen_helper_dcbz(t0); | |
4147 | else | |
4148 | gen_helper_dcbz_970(t0); | |
4149 | tcg_temp_free(t0); | |
4150 | } | |
4151 | ||
4152 | /* dst / dstt */ | |
4153 | GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC) | |
4154 | { | |
4155 | if (rA(ctx->opcode) == 0) { | |
4156 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); | |
4157 | } else { | |
4158 | /* interpreted as no-op */ | |
4159 | } | |
4160 | } | |
4161 | ||
4162 | /* dstst / dststt */ | |
4163 | GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC) | |
4164 | { | |
4165 | if (rA(ctx->opcode) == 0) { | |
4166 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); | |
4167 | } else { | |
4168 | /* interpreted as no-op */ | |
4169 | } | |
4171 | } | |
4172 | ||
4173 | /* dss / dssall */ | |
4174 | GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC) | |
4175 | { | |
4176 | /* interpreted as no-op */ | |
4177 | } | |
4178 | ||
4179 | /* icbi */ | |
4180 | GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI) | |
4181 | { | |
4182 | TCGv t0; | |
4183 | gen_set_access_type(ctx, ACCESS_CACHE); | |
4184 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
4185 | gen_update_nip(ctx, ctx->nip - 4); | |
4186 | t0 = tcg_temp_new(); | |
4187 | gen_addr_reg_index(ctx, t0); | |
4188 | gen_helper_icbi(t0); | |
4189 | tcg_temp_free(t0); | |
4190 | } | |
4191 | ||
4192 | /* Optional: */ | |
4193 | /* dcba */ | |
4194 | GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA) | |
4195 | { | |
4196 | /* interpreted as no-op */ | |
4197 | /* XXX: specification says this is treated as a store by the MMU | |
4198 | * but does not generate any exception | |
4199 | */ | |
4200 | } | |
4201 | ||
4202 | /*** Segment register manipulation ***/ | |
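| /* The handlers below operate on the 16 segment registers of 32-bit | |
|  * implementations; on 64-bit "bridge" implementations the same | |
|  * opcodes are redirected to the SLB helpers (see the *_64b handlers | |
|  * further down). | |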
4203 | /* Supervisor only: */ | |
4204 | /* mfsr */ | |
4205 | GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT) | |
4206 | { | |
4207 | #if defined(CONFIG_USER_ONLY) | |
4208 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4209 | #else | |
4210 | TCGv t0; | |
4211 | if (unlikely(!ctx->mem_idx)) { | |
4212 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4213 | return; | |
4214 | } | |
4215 | t0 = tcg_const_tl(SR(ctx->opcode)); | |
4216 | gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0); | |
4217 | tcg_temp_free(t0); | |
4218 | #endif | |
4219 | } | |
4220 | ||
4221 | /* mfsrin */ | |
4222 | GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT) | |
4223 | { | |
4224 | #if defined(CONFIG_USER_ONLY) | |
4225 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4226 | #else | |
4227 | TCGv t0; | |
4228 | if (unlikely(!ctx->mem_idx)) { | |
4229 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4230 | return; | |
4231 | } | |
4232 | t0 = tcg_temp_new(); | |
4233 | tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); | |
4234 | tcg_gen_andi_tl(t0, t0, 0xF); | |
4235 | gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0); | |
4236 | tcg_temp_free(t0); | |
4237 | #endif | |
4238 | } | |
4239 | ||
4240 | /* mtsr */ | |
4241 | GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT) | |
4242 | { | |
4243 | #if defined(CONFIG_USER_ONLY) | |
4244 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4245 | #else | |
4246 | TCGv t0; | |
4247 | if (unlikely(!ctx->mem_idx)) { | |
4248 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4249 | return; | |
4250 | } | |
4251 | t0 = tcg_const_tl(SR(ctx->opcode)); | |
4252 | gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]); | |
4253 | tcg_temp_free(t0); | |
4254 | #endif | |
4255 | } | |
4256 | ||
4257 | /* mtsrin */ | |
4258 | GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT) | |
4259 | { | |
4260 | #if defined(CONFIG_USER_ONLY) | |
4261 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4262 | #else | |
4263 | TCGv t0; | |
4264 | if (unlikely(!ctx->mem_idx)) { | |
4265 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4266 | return; | |
4267 | } | |
4268 | t0 = tcg_temp_new(); | |
4269 | tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); | |
4270 | tcg_gen_andi_tl(t0, t0, 0xF); | |
4271 | gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]); | |
4272 | tcg_temp_free(t0); | |
4273 | #endif | |
4274 | } | |
4275 | ||
4276 | #if defined(TARGET_PPC64) | |
4277 | /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ | |
4278 | /* mfsr */ | |
4279 | GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B) | |
4280 | { | |
4281 | #if defined(CONFIG_USER_ONLY) | |
4282 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4283 | #else | |
4284 | TCGv t0; | |
4285 | if (unlikely(!ctx->mem_idx)) { | |
4286 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4287 | return; | |
4288 | } | |
4289 | t0 = tcg_const_tl(SR(ctx->opcode)); | |
4290 | gen_helper_load_slb(cpu_gpr[rD(ctx->opcode)], t0); | |
4291 | tcg_temp_free(t0); | |
4292 | #endif | |
4293 | } | |
4294 | ||
4295 | /* mfsrin */ | |
4296 | GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, | |
4297 | PPC_SEGMENT_64B) | |
4298 | { | |
4299 | #if defined(CONFIG_USER_ONLY) | |
4300 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4301 | #else | |
4302 | TCGv t0; | |
4303 | if (unlikely(!ctx->mem_idx)) { | |
4304 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4305 | return; | |
4306 | } | |
4307 | t0 = tcg_temp_new(); | |
4308 | tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); | |
4309 | tcg_gen_andi_tl(t0, t0, 0xF); | |
4310 | gen_helper_load_slb(cpu_gpr[rD(ctx->opcode)], t0); | |
4311 | tcg_temp_free(t0); | |
4312 | #endif | |
4313 | } | |
4314 | ||
4315 | /* mtsr */ | |
4316 | GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B) | |
4317 | { | |
4318 | #if defined(CONFIG_USER_ONLY) | |
4319 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4320 | #else | |
4321 | TCGv t0; | |
4322 | if (unlikely(!ctx->mem_idx)) { | |
4323 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4324 | return; | |
4325 | } | |
4326 | t0 = tcg_const_tl(SR(ctx->opcode)); | |
4327 | gen_helper_store_slb(t0, cpu_gpr[rS(ctx->opcode)]); | |
4328 | tcg_temp_free(t0); | |
4329 | #endif | |
4330 | } | |
4331 | ||
4332 | /* mtsrin */ | |
4333 | GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, | |
4334 | PPC_SEGMENT_64B) | |
4335 | { | |
4336 | #if defined(CONFIG_USER_ONLY) | |
4337 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4338 | #else | |
4339 | TCGv t0; | |
4340 | if (unlikely(!ctx->mem_idx)) { | |
4341 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
4342 | return; | |
4343 | } | |
4344 | t0 = tcg_temp_new(); | |
4345 | tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); | |
4346 | tcg_gen_andi_tl(t0, t0, 0xF); | |
4347 | gen_helper_store_slb(t0, cpu_gpr[rS(ctx->opcode)]); | |
4348 | tcg_temp_free(t0); | |
4349 | #endif | |
4350 | } | |
4351 | #endif /* defined(TARGET_PPC64) */ | |
4352 | ||
4353 | /*** Lookaside buffer management ***/ | |
4354 | /* Optional & mem_idx only: */ | |
4355 | /* tlbia */ | |
4356 | GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA) | |
4357 | { | |
4358 | #if defined(CONFIG_USER_ONLY) | |
4359 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4360 | #else | |
4361 | if (unlikely(!ctx->mem_idx)) { | |
4362 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4363 | return; | |
4364 | } | |
4365 | gen_helper_tlbia(); | |
4366 | #endif | |
4367 | } | |
4368 | ||
4369 | /* tlbie */ | |
4370 | GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE) | |
4371 | { | |
4372 | #if defined(CONFIG_USER_ONLY) | |
4373 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4374 | #else | |
4375 | if (unlikely(!ctx->mem_idx)) { | |
4376 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4377 | return; | |
4378 | } | |
4379 | #if defined(TARGET_PPC64) | |
4380 | if (!ctx->sf_mode) { | |
4381 | TCGv t0 = tcg_temp_new(); | |
4382 | tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); | |
4383 | gen_helper_tlbie(t0); | |
4384 | tcg_temp_free(t0); | |
4385 | } else | |
4386 | #endif | |
4387 | gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]); | |
4388 | #endif | |
4389 | } | |
4390 | ||
4391 | /* tlbsync */ | |
4392 | GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC) | |
4393 | { | |
4394 | #if defined(CONFIG_USER_ONLY) | |
4395 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4396 | #else | |
4397 | if (unlikely(!ctx->mem_idx)) { | |
4398 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4399 | return; | |
4400 | } | |
4401 | /* This has no effect: it should ensure that all previous | |
4402 |  * tlbie operations have completed | |
4403 | */ | |
4404 | gen_stop_exception(ctx); | |
4405 | #endif | |
4406 | } | |
4407 | ||
4408 | #if defined(TARGET_PPC64) | |
4409 | /* slbia */ | |
4410 | GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI) | |
4411 | { | |
4412 | #if defined(CONFIG_USER_ONLY) | |
4413 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4414 | #else | |
4415 | if (unlikely(!ctx->mem_idx)) { | |
4416 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4417 | return; | |
4418 | } | |
4419 | gen_helper_slbia(); | |
4420 | #endif | |
4421 | } | |
4422 | ||
4423 | /* slbie */ | |
4424 | GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI) | |
4425 | { | |
4426 | #if defined(CONFIG_USER_ONLY) | |
4427 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4428 | #else | |
4429 | if (unlikely(!ctx->mem_idx)) { | |
4430 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
4431 | return; | |
4432 | } | |
4433 | gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]); | |
4434 | #endif | |
4435 | } | |
4436 | #endif | |
4437 | ||
4438 | /*** External control ***/ | |
4439 | /* Optional: */ | |
4440 | /* eciwx */ | |
4441 | GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN) | |
4442 | { | |
4443 | TCGv t0; | |
4444 | /* Should check EAR[E] ! */ | |
4445 | gen_set_access_type(ctx, ACCESS_EXT); | |
4446 | t0 = tcg_temp_new(); | |
4447 | gen_addr_reg_index(ctx, t0); | |
4448 | gen_check_align(ctx, t0, 0x03); | |
4449 | gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); | |
4450 | tcg_temp_free(t0); | |
4451 | } | |
4452 | ||
4453 | /* ecowx */ | |
4454 | GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN) | |
4455 | { | |
4456 | TCGv t0; | |
4457 | /* Should check EAR[E] ! */ | |
4458 | gen_set_access_type(ctx, ACCESS_EXT); | |
4459 | t0 = tcg_temp_new(); | |
4460 | gen_addr_reg_index(ctx, t0); | |
4461 | gen_check_align(ctx, t0, 0x03); | |
4462 | gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0); | |
4463 | tcg_temp_free(t0); | |
4464 | } | |
4465 | ||
4466 | /* PowerPC 601 specific instructions */ | |
4467 | /* abs - abs. */ | |
4468 | GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR) | |
4469 | { | |
4470 | int l1 = gen_new_label(); | |
4471 | int l2 = gen_new_label(); | |
4472 | tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); | |
4473 | tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4474 | tcg_gen_br(l2); | |
4475 | gen_set_label(l1); | |
4476 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4477 | gen_set_label(l2); | |
4478 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4479 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4480 | } | |
4481 | ||
4482 | /* abso - abso. */ | |
4483 | GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR) | |
4484 | { | |
4485 | int l1 = gen_new_label(); | |
4486 | int l2 = gen_new_label(); | |
4487 | int l3 = gen_new_label(); | |
4488 | /* Start with XER OV disabled, the most likely case */ | |
4489 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
4490 | tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); | |
4491 | tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); | |
4492 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
4493 | tcg_gen_br(l2); | |
4494 | gen_set_label(l1); | |
4495 | tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4496 | tcg_gen_br(l3); | |
4497 | gen_set_label(l2); | |
4498 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4499 | gen_set_label(l3); | |
4500 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4501 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4502 | } | |
4503 | ||
4504 | /* clcs */ | |
4505 | GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR) | |
4506 | { | |
4507 | TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); | |
4508 | gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0); | |
4509 | tcg_temp_free_i32(t0); | |
4510 | /* Rc=1 sets CR0 to an undefined state */ | |
4511 | } | |
4512 | ||
4513 | /* div - div. */ | |
4514 | GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR) | |
4515 | { | |
4516 | gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
4517 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4518 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4519 | } | |
4520 | ||
4521 | /* divo - divo. */ | |
4522 | GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR) | |
4523 | { | |
4524 | gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
4525 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4526 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4527 | } | |
4528 | ||
4529 | /* divs - divs. */ | |
4530 | GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR) | |
4531 | { | |
4532 | gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
4533 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4534 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4535 | } | |
4536 | ||
4537 | /* divso - divso. */ | |
4538 | GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR) | |
4539 | { | |
4540 | gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
4541 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4542 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4543 | } | |
4544 | ||
4545 | /* doz - doz. */ | |
4546 | GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR) | |
4547 | { | |
4548 | int l1 = gen_new_label(); | |
4549 | int l2 = gen_new_label(); | |
4550 | tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); | |
4551 | tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4552 | tcg_gen_br(l2); | |
4553 | gen_set_label(l1); | |
4554 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); | |
4555 | gen_set_label(l2); | |
4556 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4557 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4558 | } | |
4559 | ||
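| /* For t0 = rB - rA the subtraction overflows exactly when rA and rB | |
|  * have different signs and the result takes the sign of rA, which is | |
|  * what the sign bit of (rB ^ rA) & ~(rA ^ t0) tests below. | |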
4560 | /* dozo - dozo. */ | |
4561 | GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR) | |
4562 | { | |
4563 | int l1 = gen_new_label(); | |
4564 | int l2 = gen_new_label(); | |
4565 | TCGv t0 = tcg_temp_new(); | |
4566 | TCGv t1 = tcg_temp_new(); | |
4567 | TCGv t2 = tcg_temp_new(); | |
4568 | /* Start with XER OV disabled, the most likely case */ | |
4569 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
4570 | tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); | |
4571 | tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4572 | tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4573 | tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); | |
4574 | tcg_gen_andc_tl(t1, t1, t2); | |
4575 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
4576 | tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); | |
4577 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
4578 | tcg_gen_br(l2); | |
4579 | gen_set_label(l1); | |
4580 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); | |
4581 | gen_set_label(l2); | |
4582 | tcg_temp_free(t0); | |
4583 | tcg_temp_free(t1); | |
4584 | tcg_temp_free(t2); | |
4585 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4586 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4587 | } | |
4588 | ||
4589 | /* dozi */ | |
4590 | GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR) | |
4591 | { | |
4592 | target_long simm = SIMM(ctx->opcode); | |
4593 | int l1 = gen_new_label(); | |
4594 | int l2 = gen_new_label(); | |
4595 | tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); | |
4596 | tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); | |
4597 | tcg_gen_br(l2); | |
4598 | gen_set_label(l1); | |
4599 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); | |
4600 | gen_set_label(l2); | |
4601 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4602 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4603 | } | |
4604 | ||
4605 | /* lscbx - lscbx. */ | |
4606 | GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR) | |
4607 | { | |
4608 | TCGv t0 = tcg_temp_new(); | |
4609 | TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); | |
4610 | TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); | |
4611 | TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); | |
4612 | ||
4613 | gen_addr_reg_index(ctx, t0); | |
4614 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
4615 | gen_update_nip(ctx, ctx->nip - 4); | |
4616 | gen_helper_lscbx(t0, t0, t1, t2, t3); | |
4617 | tcg_temp_free_i32(t1); | |
4618 | tcg_temp_free_i32(t2); | |
4619 | tcg_temp_free_i32(t3); | |
4620 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); | |
4621 | tcg_gen_or_tl(cpu_xer, cpu_xer, t0); | |
4622 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4623 | gen_set_Rc0(ctx, t0); | |
4624 | tcg_temp_free(t0); | |
4625 | } | |
4626 | ||
4627 | /* maskg - maskg. */ | |
4628 | GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR) | |
4629 | { | |
4630 | int l1 = gen_new_label(); | |
4631 | TCGv t0 = tcg_temp_new(); | |
4632 | TCGv t1 = tcg_temp_new(); | |
4633 | TCGv t2 = tcg_temp_new(); | |
4634 | TCGv t3 = tcg_temp_new(); | |
4635 | tcg_gen_movi_tl(t3, 0xFFFFFFFF); | |
4636 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4637 | tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); | |
4638 | tcg_gen_addi_tl(t2, t0, 1); | |
4639 | tcg_gen_shr_tl(t2, t3, t2); | |
4640 | tcg_gen_shr_tl(t3, t3, t1); | |
4641 | tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); | |
4642 | tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); | |
4643 | tcg_gen_not_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4644 | gen_set_label(l1); | |
4645 | tcg_temp_free(t0); | |
4646 | tcg_temp_free(t1); | |
4647 | tcg_temp_free(t2); | |
4648 | tcg_temp_free(t3); | |
4649 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4650 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4651 | } | |
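| /* maskg builds a run of ones from bit MB = rS & 0x1F through bit | |
| * ME = rB & 0x1F in IBM bit order (bit 0 is the MSB), complementing the | |
| * intermediate XOR when the run wraps (MB > ME). Worked example: MB = 4, | |
| * ME = 7 gives (0xFFFFFFFF >> 8) ^ (0xFFFFFFFF >> 4) = 0x0F000000. | |
| */ | |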
4652 | ||
4653 | /* maskir - maskir. */ | |
4654 | GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR) | |
4655 | { | |
4656 | TCGv t0 = tcg_temp_new(); | |
4657 | TCGv t1 = tcg_temp_new(); | |
4658 | tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
4659 | tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
4660 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
4661 | tcg_temp_free(t0); | |
4662 | tcg_temp_free(t1); | |
4663 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4664 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4665 | } | |
4666 | ||
4667 | /* mul - mul. */ | |
4668 | GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR) | |
4669 | { | |
4670 | TCGv_i64 t0 = tcg_temp_new_i64(); | |
4671 | TCGv_i64 t1 = tcg_temp_new_i64(); | |
4672 | TCGv t2 = tcg_temp_new(); | |
4673 | tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
4674 | tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
4675 | tcg_gen_mul_i64(t0, t0, t1); | |
4676 | tcg_gen_trunc_i64_tl(t2, t0); | |
4677 | gen_store_spr(SPR_MQ, t2); | |
4678 | tcg_gen_shri_i64(t1, t0, 32); | |
4679 | tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); | |
4680 | tcg_temp_free_i64(t0); | |
4681 | tcg_temp_free_i64(t1); | |
4682 | tcg_temp_free(t2); | |
4683 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4684 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4685 | } | |
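| /* The code above forms the 64-bit product of the two GPRs taken as | |
| * unsigned 32-bit values and then splits it, roughly: | |
| * prod = (uint64_t)a * b; MQ = (uint32_t)prod; rD = (uint32_t)(prod >> 32); | |
| */ | |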
4686 | ||
4687 | /* mulo - mulo. */ | |
4688 | GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR) | |
4689 | { | |
4690 | int l1 = gen_new_label(); | |
4691 | TCGv_i64 t0 = tcg_temp_new_i64(); | |
4692 | TCGv_i64 t1 = tcg_temp_new_i64(); | |
4693 | TCGv t2 = tcg_temp_new(); | |
4694 | /* Start with XER OV disabled, the most likely case */ | |
4695 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
4696 | tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); | |
4697 | tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); | |
4698 | tcg_gen_mul_i64(t0, t0, t1); | |
4699 | tcg_gen_trunc_i64_tl(t2, t0); | |
4700 | gen_store_spr(SPR_MQ, t2); | |
4701 | tcg_gen_shri_i64(t1, t0, 32); | |
4702 | tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); | |
4703 | tcg_gen_ext32s_i64(t1, t0); | |
4704 | tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); | |
4705 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
4706 | gen_set_label(l1); | |
4707 | tcg_temp_free_i64(t0); | |
4708 | tcg_temp_free_i64(t1); | |
4709 | tcg_temp_free(t2); | |
4710 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4711 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4712 | } | |
4713 | ||
4714 | /* nabs - nabs. */ | |
4715 | GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR) | |
4716 | { | |
4717 | int l1 = gen_new_label(); | |
4718 | int l2 = gen_new_label(); | |
4719 | tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); | |
4720 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4721 | tcg_gen_br(l2); | |
4722 | gen_set_label(l1); | |
4723 | tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4724 | gen_set_label(l2); | |
4725 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4726 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4727 | } | |
4728 | ||
4729 | /* nabso - nabso. */ | |
4730 | GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR) | |
4731 | { | |
4732 | int l1 = gen_new_label(); | |
4733 | int l2 = gen_new_label(); | |
4734 | tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); | |
4735 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4736 | tcg_gen_br(l2); | |
4737 | gen_set_label(l1); | |
4738 | tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
4739 | gen_set_label(l2); | |
4740 | /* nabs never overflows */ | |
4741 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
4742 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4743 | gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); | |
4744 | } | |
4745 | ||
4746 | /* rlmi - rlmi. */ | |
4747 | GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR) | |
4748 | { | |
4749 | uint32_t mb = MB(ctx->opcode); | |
4750 | uint32_t me = ME(ctx->opcode); | |
4751 | TCGv t0 = tcg_temp_new(); | |
4752 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4753 | tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); | |
4754 | tcg_gen_andi_tl(t0, t0, MASK(mb, me)); | |
4755 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); | |
4756 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); | |
4757 | tcg_temp_free(t0); | |
4758 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4759 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4760 | } | |
4761 | ||
4762 | /* rrib - rrib. */ | |
4763 | GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR) | |
4764 | { | |
4765 | TCGv t0 = tcg_temp_new(); | |
4766 | TCGv t1 = tcg_temp_new(); | |
4767 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4768 | tcg_gen_movi_tl(t1, 0x80000000); | |
4769 | tcg_gen_shr_tl(t1, t1, t0); | |
4770 | tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); | |
4771 | tcg_gen_and_tl(t0, t0, t1); | |
4772 | tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); | |
4773 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
4774 | tcg_temp_free(t0); | |
4775 | tcg_temp_free(t1); | |
4776 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4777 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4778 | } | |
4779 | ||
4780 | /* sle - sle. */ | |
4781 | GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR) | |
4782 | { | |
4783 | TCGv t0 = tcg_temp_new(); | |
4784 | TCGv t1 = tcg_temp_new(); | |
4785 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4786 | tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); | |
4787 | tcg_gen_subfi_tl(t1, 32, t1); | |
4788 | tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); | |
4789 | tcg_gen_or_tl(t1, t0, t1); | |
4790 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
4791 | gen_store_spr(SPR_MQ, t1); | |
4792 | tcg_temp_free(t0); | |
4793 | tcg_temp_free(t1); | |
4794 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4795 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4796 | } | |
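| /* The POWER "q" shift forms cooperate through the MQ SPR: sle stores the | |
| * full 32-bit rotation there so that a following sleq/sllq/srlq can merge | |
| * back the bits that were shifted out; the gen_store_spr(SPR_MQ, ...) | |
| * calls in this group are that bookkeeping step. | |
| */ | |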
4797 | ||
4798 | /* sleq - sleq. */ | |
4799 | GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR) | |
4800 | { | |
4801 | TCGv t0 = tcg_temp_new(); | |
4802 | TCGv t1 = tcg_temp_new(); | |
4803 | TCGv t2 = tcg_temp_new(); | |
4804 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4805 | tcg_gen_movi_tl(t2, 0xFFFFFFFF); | |
4806 | tcg_gen_shl_tl(t2, t2, t0); | |
4807 | tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); | |
4808 | gen_load_spr(t1, SPR_MQ); | |
4809 | gen_store_spr(SPR_MQ, t0); | |
4810 | tcg_gen_and_tl(t0, t0, t2); | |
4811 | tcg_gen_andc_tl(t1, t1, t2); | |
4812 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
4813 | tcg_temp_free(t0); | |
4814 | tcg_temp_free(t1); | |
4815 | tcg_temp_free(t2); | |
4816 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4817 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4818 | } | |
4819 | ||
4820 | /* sliq - sliq. */ | |
4821 | GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR) | |
4822 | { | |
4823 | int sh = SH(ctx->opcode); | |
4824 | TCGv t0 = tcg_temp_new(); | |
4825 | TCGv t1 = tcg_temp_new(); | |
4826 | tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
4827 | tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); | |
4828 | tcg_gen_or_tl(t1, t0, t1); | |
4829 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
4830 | gen_store_spr(SPR_MQ, t1); | |
4831 | tcg_temp_free(t0); | |
4832 | tcg_temp_free(t1); | |
4833 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4834 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4835 | } | |
4836 | ||
4837 | /* slliq - slliq. */ | |
4838 | GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR) | |
4839 | { | |
4840 | int sh = SH(ctx->opcode); | |
4841 | TCGv t0 = tcg_temp_new(); | |
4842 | TCGv t1 = tcg_temp_new(); | |
4843 | tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
4844 | gen_load_spr(t1, SPR_MQ); | |
4845 | gen_store_spr(SPR_MQ, t0); | |
4846 | tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); | |
4847 | tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); | |
4848 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
4849 | tcg_temp_free(t0); | |
4850 | tcg_temp_free(t1); | |
4851 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4852 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4853 | } | |
4854 | ||
4855 | /* sllq - sllq. */ | |
4856 | GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR) | |
4857 | { | |
4858 | int l1 = gen_new_label(); | |
4859 | int l2 = gen_new_label(); | |
4860 | TCGv t0 = tcg_temp_local_new(); | |
4861 | TCGv t1 = tcg_temp_local_new(); | |
4862 | TCGv t2 = tcg_temp_local_new(); | |
4863 | tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4864 | tcg_gen_movi_tl(t1, 0xFFFFFFFF); | |
4865 | tcg_gen_shl_tl(t1, t1, t2); | |
4866 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); | |
4867 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); | |
4868 | gen_load_spr(t0, SPR_MQ); | |
4869 | tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
4870 | tcg_gen_br(l2); | |
4871 | gen_set_label(l1); | |
4872 | tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); | |
4873 | gen_load_spr(t2, SPR_MQ); | |
4874 | tcg_gen_andc_tl(t1, t2, t1); | |
4875 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
4876 | gen_set_label(l2); | |
4877 | tcg_temp_free(t0); | |
4878 | tcg_temp_free(t1); | |
4879 | tcg_temp_free(t2); | |
4880 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4881 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4882 | } | |
4883 | ||
4884 | /* slq - slq. */ | |
4885 | GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR) | |
4886 | { | |
4887 | int l1 = gen_new_label(); | |
4888 | TCGv t0 = tcg_temp_new(); | |
4889 | TCGv t1 = tcg_temp_new(); | |
4890 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4891 | tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); | |
4892 | tcg_gen_subfi_tl(t1, 32, t1); | |
4893 | tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); | |
4894 | tcg_gen_or_tl(t1, t0, t1); | |
4895 | gen_store_spr(SPR_MQ, t1); | |
4896 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); | |
4897 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
4898 | tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); | |
4899 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
4900 | gen_set_label(l1); | |
4901 | tcg_temp_free(t0); | |
4902 | tcg_temp_free(t1); | |
4903 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4904 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4905 | } | |
4906 | ||
4907 | /* sraiq - sraiq. */ | |
4908 | GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR) | |
4909 | { | |
4910 | int sh = SH(ctx->opcode); | |
4911 | int l1 = gen_new_label(); | |
4912 | TCGv t0 = tcg_temp_new(); | |
4913 | TCGv t1 = tcg_temp_new(); | |
4914 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
4915 | tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); | |
4916 | tcg_gen_or_tl(t0, t0, t1); | |
4917 | gen_store_spr(SPR_MQ, t0); | |
4918 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
4919 | tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); | |
4920 | tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); | |
4921 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA)); | |
4922 | gen_set_label(l1); | |
4923 | tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); | |
4924 | tcg_temp_free(t0); | |
4925 | tcg_temp_free(t1); | |
4926 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4927 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4928 | } | |
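| /* For the algebraic shifts, XER[CA] is only set when the source is | |
| * negative and at least one 1 bit is shifted out. Example for sraiq: | |
| * rS = 0xFFFFFFF1 (-15), sh = 2 shifts out the bits 01, so CA = 1 and | |
| * rA = 0xFFFFFFFC (-4). | |
| */ | |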
4929 | ||
4930 | /* sraq - sraq. */ | |
4931 | GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR) | |
4932 | { | |
4933 | int l1 = gen_new_label(); | |
4934 | int l2 = gen_new_label(); | |
4935 | TCGv t0 = tcg_temp_new(); | |
4936 | TCGv t1 = tcg_temp_local_new(); | |
4937 | TCGv t2 = tcg_temp_local_new(); | |
4938 | tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4939 | tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); | |
4940 | tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); | |
4941 | tcg_gen_subfi_tl(t2, 32, t2); | |
4942 | tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); | |
4943 | tcg_gen_or_tl(t0, t0, t2); | |
4944 | gen_store_spr(SPR_MQ, t0); | |
4945 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); | |
4946 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); | |
4947 | tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); | |
4948 | tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); | |
4949 | gen_set_label(l1); | |
4950 | tcg_temp_free(t0); | |
4951 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); | |
4952 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); | |
4953 | tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); | |
4954 | tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); | |
4955 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA)); | |
4956 | gen_set_label(l2); | |
4957 | tcg_temp_free(t1); | |
4958 | tcg_temp_free(t2); | |
4959 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4960 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4961 | } | |
4962 | ||
4963 | /* sre - sre. */ | |
4964 | GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR) | |
4965 | { | |
4966 | TCGv t0 = tcg_temp_new(); | |
4967 | TCGv t1 = tcg_temp_new(); | |
4968 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4969 | tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); | |
4970 | tcg_gen_subfi_tl(t1, 32, t1); | |
4971 | tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); | |
4972 | tcg_gen_or_tl(t1, t0, t1); | |
4973 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
4974 | gen_store_spr(SPR_MQ, t1); | |
4975 | tcg_temp_free(t0); | |
4976 | tcg_temp_free(t1); | |
4977 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4978 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4979 | } | |
4980 | ||
4981 | /* srea - srea. */ | |
4982 | GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR) | |
4983 | { | |
4984 | TCGv t0 = tcg_temp_new(); | |
4985 | TCGv t1 = tcg_temp_new(); | |
4986 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
4987 | tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); | |
4988 | gen_store_spr(SPR_MQ, t0); | |
4989 | tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); | |
4990 | tcg_temp_free(t0); | |
4991 | tcg_temp_free(t1); | |
4992 | if (unlikely(Rc(ctx->opcode) != 0)) | |
4993 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
4994 | } | |
4995 | ||
4996 | /* sreq */ | |
4997 | GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR) | |
4998 | { | |
4999 | TCGv t0 = tcg_temp_new(); | |
5000 | TCGv t1 = tcg_temp_new(); | |
5001 | TCGv t2 = tcg_temp_new(); | |
5002 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
5003 | tcg_gen_movi_tl(t1, 0xFFFFFFFF); | |
5004 | tcg_gen_shr_tl(t1, t1, t0); | |
5005 | tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); | |
5006 | gen_load_spr(t2, SPR_MQ); | |
5007 | gen_store_spr(SPR_MQ, t0); | |
5008 | tcg_gen_and_tl(t0, t0, t1); | |
5009 | tcg_gen_andc_tl(t2, t2, t1); | |
5010 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); | |
5011 | tcg_temp_free(t0); | |
5012 | tcg_temp_free(t1); | |
5013 | tcg_temp_free(t2); | |
5014 | if (unlikely(Rc(ctx->opcode) != 0)) | |
5015 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
5016 | } | |
5017 | ||
5018 | /* sriq */ | |
5019 | GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR) | |
5020 | { | |
5021 | int sh = SH(ctx->opcode); | |
5022 | TCGv t0 = tcg_temp_new(); | |
5023 | TCGv t1 = tcg_temp_new(); | |
5024 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
5025 | tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); | |
5026 | tcg_gen_or_tl(t1, t0, t1); | |
5027 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
5028 | gen_store_spr(SPR_MQ, t1); | |
5029 | tcg_temp_free(t0); | |
5030 | tcg_temp_free(t1); | |
5031 | if (unlikely(Rc(ctx->opcode) != 0)) | |
5032 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
5033 | } | |
5034 | ||
5035 | /* srliq */ | |
5036 | GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR) | |
5037 | { | |
5038 | int sh = SH(ctx->opcode); | |
5039 | TCGv t0 = tcg_temp_new(); | |
5040 | TCGv t1 = tcg_temp_new(); | |
5041 | tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); | |
5042 | gen_load_spr(t1, SPR_MQ); | |
5043 | gen_store_spr(SPR_MQ, t0); | |
5044 | tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); | |
5045 | tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); | |
5046 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
5047 | tcg_temp_free(t0); | |
5048 | tcg_temp_free(t1); | |
5049 | if (unlikely(Rc(ctx->opcode) != 0)) | |
5050 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
5051 | } | |
5052 | ||
5053 | /* srlq */ | |
5054 | GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR) | |
5055 | { | |
5056 | int l1 = gen_new_label(); | |
5057 | int l2 = gen_new_label(); | |
5058 | TCGv t0 = tcg_temp_local_new(); | |
5059 | TCGv t1 = tcg_temp_local_new(); | |
5060 | TCGv t2 = tcg_temp_local_new(); | |
5061 | tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
5062 | tcg_gen_movi_tl(t1, 0xFFFFFFFF); | |
5063 | tcg_gen_shr_tl(t1, t1, t2); | |
5064 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); | |
5065 | tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); | |
5066 | gen_load_spr(t0, SPR_MQ); | |
5067 | tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
5068 | tcg_gen_br(l2); | |
5069 | gen_set_label(l1); | |
5070 | tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); | |
5071 | tcg_gen_and_tl(t0, t0, t1); | |
5072 | gen_load_spr(t2, SPR_MQ); | |
5073 | tcg_gen_andc_tl(t1, t2, t1); | |
5074 | tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); | |
5075 | gen_set_label(l2); | |
5076 | tcg_temp_free(t0); | |
5077 | tcg_temp_free(t1); | |
5078 | tcg_temp_free(t2); | |
5079 | if (unlikely(Rc(ctx->opcode) != 0)) | |
5080 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
5081 | } | |
5082 | ||
5083 | /* srq */ | |
5084 | GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR) | |
5085 | { | |
5086 | int l1 = gen_new_label(); | |
5087 | TCGv t0 = tcg_temp_new(); | |
5088 | TCGv t1 = tcg_temp_new(); | |
5089 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); | |
5090 | tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); | |
5091 | tcg_gen_subfi_tl(t1, 32, t1); | |
5092 | tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); | |
5093 | tcg_gen_or_tl(t1, t0, t1); | |
5094 | gen_store_spr(SPR_MQ, t1); | |
5095 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); | |
5096 | tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); | |
5097 | tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); | |
5098 | tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); | |
5099 | gen_set_label(l1); | |
5100 | tcg_temp_free(t0); | |
5101 | tcg_temp_free(t1); | |
5102 | if (unlikely(Rc(ctx->opcode) != 0)) | |
5103 | gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); | |
5104 | } | |
5105 | ||
5106 | /* PowerPC 602 specific instructions */ | |
5107 | /* dsa */ | |
5108 | GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC) | |
5109 | { | |
5110 | /* XXX: TODO */ | |
5111 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
5112 | } | |
5113 | ||
5114 | /* esa */ | |
5115 | GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC) | |
5116 | { | |
5117 | /* XXX: TODO */ | |
5118 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
5119 | } | |
5120 | ||
5121 | /* mfrom */ | |
5122 | GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC) | |
5123 | { | |
5124 | #if defined(CONFIG_USER_ONLY) | |
5125 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5126 | #else | |
5127 | if (unlikely(!ctx->mem_idx)) { | |
5128 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5129 | return; | |
5130 | } | |
5131 | gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
5132 | #endif | |
5133 | } | |
5134 | ||
5135 | /* 602 - 603 - G2 TLB management */ | |
5136 | /* tlbld */ | |
5137 | GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB) | |
5138 | { | |
5139 | #if defined(CONFIG_USER_ONLY) | |
5140 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5141 | #else | |
5142 | if (unlikely(!ctx->mem_idx)) { | |
5143 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5144 | return; | |
5145 | } | |
5146 | gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]); | |
5147 | #endif | |
5148 | } | |
5149 | ||
5150 | /* tlbli */ | |
5151 | GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB) | |
5152 | { | |
5153 | #if defined(CONFIG_USER_ONLY) | |
5154 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5155 | #else | |
5156 | if (unlikely(!ctx->mem_idx)) { | |
5157 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5158 | return; | |
5159 | } | |
5160 | gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]); | |
5161 | #endif | |
5162 | } | |
5163 | ||
5164 | /* 74xx TLB management */ | |
5165 | /* tlbld */ | |
5166 | GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB) | |
5167 | { | |
5168 | #if defined(CONFIG_USER_ONLY) | |
5169 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5170 | #else | |
5171 | if (unlikely(!ctx->mem_idx)) { | |
5172 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5173 | return; | |
5174 | } | |
5175 | gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]); | |
5176 | #endif | |
5177 | } | |
5178 | ||
5179 | /* tlbli */ | |
5180 | GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB) | |
5181 | { | |
5182 | #if defined(CONFIG_USER_ONLY) | |
5183 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5184 | #else | |
5185 | if (unlikely(!ctx->mem_idx)) { | |
5186 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5187 | return; | |
5188 | } | |
5189 | gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]); | |
5190 | #endif | |
5191 | } | |
5192 | ||
5193 | /* POWER instructions not in PowerPC 601 */ | |
5194 | /* clf */ | |
5195 | GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER) | |
5196 | { | |
5197 | /* Cache line flush: implemented as no-op */ | |
5198 | } | |
5199 | ||
5200 | /* cli */ | |
5201 | GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER) | |
5202 | { | |
5203 | /* Cache line invalidate: privileged and treated as no-op */ | |
5204 | #if defined(CONFIG_USER_ONLY) | |
5205 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5206 | #else | |
5207 | if (unlikely(!ctx->mem_idx)) { | |
5208 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5209 | return; | |
5210 | } | |
5211 | #endif | |
5212 | } | |
5213 | ||
5214 | /* dclst */ | |
5215 | GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER) | |
5216 | { | |
5217 | /* Data cache line store: treated as no-op */ | |
5218 | } | |
5219 | ||
5220 | GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER) | |
5221 | { | |
5222 | #if defined(CONFIG_USER_ONLY) | |
5223 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5224 | #else | |
5225 | int ra = rA(ctx->opcode); | |
5226 | int rd = rD(ctx->opcode); | |
5227 | TCGv t0; | |
5228 | if (unlikely(!ctx->mem_idx)) { | |
5229 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5230 | return; | |
5231 | } | |
5232 | t0 = tcg_temp_new(); | |
5233 | gen_addr_reg_index(ctx, t0); | |
5234 | tcg_gen_shri_tl(t0, t0, 28); | |
5235 | tcg_gen_andi_tl(t0, t0, 0xF); | |
5236 | gen_helper_load_sr(cpu_gpr[rd], t0); | |
5237 | tcg_temp_free(t0); | |
5238 | if (ra != 0 && ra != rd) | |
5239 | tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); | |
5240 | #endif | |
5241 | } | |
5242 | ||
5243 | GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER) | |
5244 | { | |
5245 | #if defined(CONFIG_USER_ONLY) | |
5246 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5247 | #else | |
5248 | TCGv t0; | |
5249 | if (unlikely(!ctx->mem_idx)) { | |
5250 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5251 | return; | |
5252 | } | |
5253 | t0 = tcg_temp_new(); | |
5254 | gen_addr_reg_index(ctx, t0); | |
5255 | gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0); | |
5256 | tcg_temp_free(t0); | |
5257 | #endif | |
5258 | } | |
5259 | ||
5260 | GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER) | |
5261 | { | |
5262 | #if defined(CONFIG_USER_ONLY) | |
5263 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5264 | #else | |
5265 | if (unlikely(!ctx->mem_idx)) { | |
5266 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5267 | return; | |
5268 | } | |
5269 | gen_helper_rfsvc(); | |
5270 | gen_sync_exception(ctx); | |
5271 | #endif | |
5272 | } | |
5273 | ||
5274 | /* svc is not implemented for now */ | |
5275 | ||
5276 | /* POWER2 specific instructions */ | |
5277 | /* Quad manipulation (load/store two floats at a time) */ | |
5278 | ||
5279 | /* lfq */ | |
5280 | GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2) | |
5281 | { | |
5282 | int rd = rD(ctx->opcode); | |
5283 | TCGv t0; | |
5284 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5285 | t0 = tcg_temp_new(); | |
5286 | gen_addr_imm_index(ctx, t0, 0); | |
5287 | gen_qemu_ld64(ctx, cpu_fpr[rd], t0); | |
5288 | gen_addr_add(ctx, t0, t0, 8); | |
5289 | gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0); | |
5290 | tcg_temp_free(t0); | |
5291 | } | |
5292 | ||
5293 | /* lfqu */ | |
5294 | GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2) | |
5295 | { | |
5296 | int ra = rA(ctx->opcode); | |
5297 | int rd = rD(ctx->opcode); | |
5298 | TCGv t0, t1; | |
5299 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5300 | t0 = tcg_temp_new(); | |
5301 | t1 = tcg_temp_new(); | |
5302 | gen_addr_imm_index(ctx, t0, 0); | |
5303 | gen_qemu_ld64(ctx, cpu_fpr[rd], t0); | |
5304 | gen_addr_add(ctx, t1, t0, 8); | |
5305 | gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1); | |
5306 | if (ra != 0) | |
5307 | tcg_gen_mov_tl(cpu_gpr[ra], t0); | |
5308 | tcg_temp_free(t0); | |
5309 | tcg_temp_free(t1); | |
5310 | } | |
5311 | ||
5312 | /* lfqux */ | |
5313 | GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2) | |
5314 | { | |
5315 | int ra = rA(ctx->opcode); | |
5316 | int rd = rD(ctx->opcode); | |
5317 | TCGv t0, t1; | |
5318 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5319 | t0 = tcg_temp_new(); | |
5320 | gen_addr_reg_index(ctx, t0); | |
5321 | gen_qemu_ld64(ctx, cpu_fpr[rd], t0); | |
5322 | t1 = tcg_temp_new(); | |
5323 | gen_addr_add(ctx, t1, t0, 8); | |
5324 | gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1); | |
5325 | tcg_temp_free(t1); | |
5326 | if (ra != 0) | |
5327 | tcg_gen_mov_tl(cpu_gpr[ra], t0); | |
5328 | tcg_temp_free(t0); | |
5329 | } | |
5330 | ||
5331 | /* lfqx */ | |
5332 | GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2) | |
5333 | { | |
5334 | int rd = rD(ctx->opcode); | |
5335 | TCGv t0; | |
5336 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5337 | t0 = tcg_temp_new(); | |
5338 | gen_addr_reg_index(ctx, t0); | |
5339 | gen_qemu_ld64(ctx, cpu_fpr[rd], t0); | |
5340 | gen_addr_add(ctx, t0, t0, 8); | |
5341 | gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0); | |
5342 | tcg_temp_free(t0); | |
5343 | } | |
5344 | ||
5345 | /* stfq */ | |
5346 | GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2) | |
5347 | { | |
5348 | int rd = rD(ctx->opcode); | |
5349 | TCGv t0; | |
5350 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5351 | t0 = tcg_temp_new(); | |
5352 | gen_addr_imm_index(ctx, t0, 0); | |
5353 | gen_qemu_st64(ctx, cpu_fpr[rd], t0); | |
5354 | gen_addr_add(ctx, t0, t0, 8); | |
5355 | gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0); | |
5356 | tcg_temp_free(t0); | |
5357 | } | |
5358 | ||
5359 | /* stfqu */ | |
5360 | GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2) | |
5361 | { | |
5362 | int ra = rA(ctx->opcode); | |
5363 | int rd = rD(ctx->opcode); | |
5364 | TCGv t0, t1; | |
5365 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5366 | t0 = tcg_temp_new(); | |
5367 | gen_addr_imm_index(ctx, t0, 0); | |
5368 | gen_qemu_st64(ctx, cpu_fpr[rd], t0); | |
5369 | t1 = tcg_temp_new(); | |
5370 | gen_addr_add(ctx, t1, t0, 8); | |
5371 | gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1); | |
5372 | tcg_temp_free(t1); | |
5373 | if (ra != 0) | |
5374 | tcg_gen_mov_tl(cpu_gpr[ra], t0); | |
5375 | tcg_temp_free(t0); | |
5376 | } | |
5377 | ||
5378 | /* stfqux */ | |
5379 | GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2) | |
5380 | { | |
5381 | int ra = rA(ctx->opcode); | |
5382 | int rd = rD(ctx->opcode); | |
5383 | TCGv t0, t1; | |
5384 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5385 | t0 = tcg_temp_new(); | |
5386 | gen_addr_reg_index(ctx, t0); | |
5387 | gen_qemu_st64(ctx, cpu_fpr[rd], t0); | |
5388 | t1 = tcg_temp_new(); | |
5389 | gen_addr_add(ctx, t1, t0, 8); | |
5390 | gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1); | |
5391 | tcg_temp_free(t1); | |
5392 | if (ra != 0) | |
5393 | tcg_gen_mov_tl(cpu_gpr[ra], t0); | |
5394 | tcg_temp_free(t0); | |
5395 | } | |
5396 | ||
5397 | /* stfqx */ | |
5398 | GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2) | |
5399 | { | |
5400 | int rd = rD(ctx->opcode); | |
5401 | TCGv t0; | |
5402 | gen_set_access_type(ctx, ACCESS_FLOAT); | |
5403 | t0 = tcg_temp_new(); | |
5404 | gen_addr_reg_index(ctx, t0); | |
5405 | gen_qemu_st64(ctx, cpu_fpr[rd], t0); | |
5406 | gen_addr_add(ctx, t0, t0, 8); | |
5407 | gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0); | |
5408 | tcg_temp_free(t0); | |
5409 | } | |
5410 | ||
5411 | /* BookE specific instructions */ | |
5412 | /* XXX: not implemented on 440 ? */ | |
5413 | GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI) | |
5414 | { | |
5415 | /* XXX: TODO */ | |
5416 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
5417 | } | |
5418 | ||
5419 | /* XXX: not implemented on 440 ? */ | |
5420 | GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA) | |
5421 | { | |
5422 | #if defined(CONFIG_USER_ONLY) | |
5423 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5424 | #else | |
5425 | TCGv t0; | |
5426 | if (unlikely(!ctx->mem_idx)) { | |
5427 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5428 | return; | |
5429 | } | |
5430 | t0 = tcg_temp_new(); | |
5431 | gen_addr_reg_index(ctx, t0); | |
5432 | gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]); | |
5433 | tcg_temp_free(t0); | |
5434 | #endif | |
5435 | } | |
5436 | ||
5437 | /* All 405 MAC instructions are translated here */ | |
5438 | static always_inline void gen_405_mulladd_insn (DisasContext *ctx, | |
5439 | int opc2, int opc3, | |
5440 | int ra, int rb, int rt, int Rc) | |
5441 | { | |
5442 | TCGv t0, t1; | |
5443 | ||
5444 | t0 = tcg_temp_local_new(); | |
5445 | t1 = tcg_temp_local_new(); | |
5446 | ||
5447 | switch (opc3 & 0x0D) { | |
5448 | case 0x05: | |
5449 | /* macchw - macchw. - macchwo - macchwo. */ | |
5450 | /* macchws - macchws. - macchwso - macchwso. */ | |
5451 | /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ | |
5452 | /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ | |
5453 | /* mulchw - mulchw. */ | |
5454 | tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); | |
5455 | tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); | |
5456 | tcg_gen_ext16s_tl(t1, t1); | |
5457 | break; | |
5458 | case 0x04: | |
5459 | /* macchwu - macchwu. - macchwuo - macchwuo. */ | |
5460 | /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ | |
5461 | /* mulchwu - mulchwu. */ | |
5462 | tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); | |
5463 | tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); | |
5464 | tcg_gen_ext16u_tl(t1, t1); | |
5465 | break; | |
5466 | case 0x01: | |
5467 | /* machhw - machhw. - machhwo - machhwo. */ | |
5468 | /* machhws - machhws. - machhwso - machhwso. */ | |
5469 | /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ | |
5470 | /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ | |
5471 | /* mulhhw - mulhhw. */ | |
5472 | tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); | |
5473 | tcg_gen_ext16s_tl(t0, t0); | |
5474 | tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); | |
5475 | tcg_gen_ext16s_tl(t1, t1); | |
5476 | break; | |
5477 | case 0x00: | |
5478 | /* machhwu - machhwu. - machhwuo - machhwuo. */ | |
5479 | /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ | |
5480 | /* mulhhwu - mulhhwu. */ | |
5481 | tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); | |
5482 | tcg_gen_ext16u_tl(t0, t0); | |
5483 | tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); | |
5484 | tcg_gen_ext16u_tl(t1, t1); | |
5485 | break; | |
5486 | case 0x0D: | |
5487 | /* maclhw - maclhw. - maclhwo - maclhwo. */ | |
5488 | /* maclhws - maclhws. - maclhwso - maclhwso. */ | |
5489 | /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ | |
5490 | /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ | |
5491 | /* mullhw - mullhw. */ | |
5492 | tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); | |
5493 | tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); | |
5494 | break; | |
5495 | case 0x0C: | |
5496 | /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ | |
5497 | /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ | |
5498 | /* mullhwu - mullhwu. */ | |
5499 | tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); | |
5500 | tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); | |
5501 | break; | |
5502 | } | |
5503 | if (opc2 & 0x04) { | |
5504 | /* (n)multiply-and-accumulate (0x0C / 0x0E) */ | |
5505 | tcg_gen_mul_tl(t1, t0, t1); | |
5506 | if (opc2 & 0x02) { | |
5507 | /* nmultiply-and-accumulate (0x0E) */ | |
5508 | tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); | |
5509 | } else { | |
5510 | /* multiply-and-accumulate (0x0C) */ | |
5511 | tcg_gen_add_tl(t0, cpu_gpr[rt], t1); | |
5512 | } | |
5513 | ||
5514 | if (opc3 & 0x12) { | |
5515 | /* Check overflow and/or saturate */ | |
5516 | int l1 = gen_new_label(); | |
5517 | ||
5518 | if (opc3 & 0x10) { | |
5519 | /* Start with XER OV disabled, the most likely case */ | |
5520 | tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); | |
5521 | } | |
5522 | if (opc3 & 0x01) { | |
5523 | /* Signed */ | |
5524 | tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); | |
5525 | tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); | |
5526 | tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); | |
5527 | tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); | |
5528 | if (opc3 & 0x02) { | |
5529 | /* Saturate */ | |
5530 | tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); | |
5531 | tcg_gen_xori_tl(t0, t0, 0x7fffffff); | |
5532 | } | |
5533 | } else { | |
5534 | /* Unsigned */ | |
5535 | tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); | |
5536 | if (opc3 & 0x02) { | |
5537 | /* Saturate */ | |
5538 | tcg_gen_movi_tl(t0, UINT32_MAX); | |
5539 | } | |
5540 | } | |
5541 | if (opc3 & 0x10) { | |
5542 | /* Check overflow */ | |
5543 | tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); | |
5544 | } | |
5545 | gen_set_label(l1); | |
5546 | tcg_gen_mov_tl(cpu_gpr[rt], t0); | |
5547 | } | |
5548 | } else { | |
5549 | tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); | |
5550 | } | |
5551 | tcg_temp_free(t0); | |
5552 | tcg_temp_free(t1); | |
5553 | if (unlikely(Rc != 0)) { | |
5554 | /* Update Rc0 */ | |
5555 | gen_set_Rc0(ctx, cpu_gpr[rt]); | |
5556 | } | |
5557 | } | |
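| /* Decoding sketch for the 405 MAC family handled above: opc2 selects | |
| * plain multiply (0x08), multiply-accumulate (0x0C) or negative MAC | |
| * (0x0E); opc3 bit 0x10 asks for XER[OV] tracking, bit 0x02 for | |
| * saturation, bit 0x01 for signed operands, and opc3 & 0x0D picks the | |
| * operand halves. Example: macchwso -> opc2 = 0x0C, opc3 = 0x17, i.e. a | |
| * signed cross-halfword MAC with saturation and overflow recording. | |
| */ | |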
5558 | ||
5559 | #define GEN_MAC_HANDLER(name, opc2, opc3) \ | |
5560 | GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \ | |
5561 | { \ | |
5562 | gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ | |
5563 | rD(ctx->opcode), Rc(ctx->opcode)); \ | |
5564 | } | |
5565 | ||
5566 | /* macchw - macchw. */ | |
5567 | GEN_MAC_HANDLER(macchw, 0x0C, 0x05); | |
5568 | /* macchwo - macchwo. */ | |
5569 | GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); | |
5570 | /* macchws - macchws. */ | |
5571 | GEN_MAC_HANDLER(macchws, 0x0C, 0x07); | |
5572 | /* macchwso - macchwso. */ | |
5573 | GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); | |
5574 | /* macchwsu - macchwsu. */ | |
5575 | GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); | |
5576 | /* macchwsuo - macchwsuo. */ | |
5577 | GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); | |
5578 | /* macchwu - macchwu. */ | |
5579 | GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); | |
5580 | /* macchwuo - macchwuo. */ | |
5581 | GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); | |
5582 | /* machhw - machhw. */ | |
5583 | GEN_MAC_HANDLER(machhw, 0x0C, 0x01); | |
5584 | /* machhwo - machhwo. */ | |
5585 | GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); | |
5586 | /* machhws - machhws. */ | |
5587 | GEN_MAC_HANDLER(machhws, 0x0C, 0x03); | |
5588 | /* machhwso - machhwso. */ | |
5589 | GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); | |
5590 | /* machhwsu - machhwsu. */ | |
5591 | GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); | |
5592 | /* machhwsuo - machhwsuo. */ | |
5593 | GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); | |
5594 | /* machhwu - machhwu. */ | |
5595 | GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); | |
5596 | /* machhwuo - machhwuo. */ | |
5597 | GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); | |
5598 | /* maclhw - maclhw. */ | |
5599 | GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); | |
5600 | /* maclhwo - maclhwo. */ | |
5601 | GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); | |
5602 | /* maclhws - maclhws. */ | |
5603 | GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); | |
5604 | /* maclhwso - maclhwso. */ | |
5605 | GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); | |
5606 | /* maclhwu - maclhwu. */ | |
5607 | GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); | |
5608 | /* maclhwuo - maclhwuo. */ | |
5609 | GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); | |
5610 | /* maclhwsu - maclhwsu. */ | |
5611 | GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); | |
5612 | /* maclhwsuo - maclhwsuo. */ | |
5613 | GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); | |
5614 | /* nmacchw - nmacchw. */ | |
5615 | GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); | |
5616 | /* nmacchwo - nmacchwo. */ | |
5617 | GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); | |
5618 | /* nmacchws - nmacchws. */ | |
5619 | GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); | |
5620 | /* nmacchwso - nmacchwso. */ | |
5621 | GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); | |
5622 | /* nmachhw - nmachhw. */ | |
5623 | GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); | |
5624 | /* nmachhwo - nmachhwo. */ | |
5625 | GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); | |
5626 | /* nmachhws - nmachhws. */ | |
5627 | GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); | |
5628 | /* nmachhwso - nmachhwso. */ | |
5629 | GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); | |
5630 | /* nmaclhw - nmaclhw. */ | |
5631 | GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); | |
5632 | /* nmaclhwo - nmaclhwo. */ | |
5633 | GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); | |
5634 | /* nmaclhws - nmaclhws. */ | |
5635 | GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); | |
5636 | /* nmaclhwso - nmaclhwso. */ | |
5637 | GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); | |
5638 | ||
5639 | /* mulchw - mulchw. */ | |
5640 | GEN_MAC_HANDLER(mulchw, 0x08, 0x05); | |
5641 | /* mulchwu - mulchwu. */ | |
5642 | GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); | |
5643 | /* mulhhw - mulhhw. */ | |
5644 | GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); | |
5645 | /* mulhhwu - mulhhwu. */ | |
5646 | GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); | |
5647 | /* mullhw - mullhw. */ | |
5648 | GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); | |
5649 | /* mullhwu - mullhwu. */ | |
5650 | GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); | |
5651 | ||
5652 | /* mfdcr */ | |
5653 | GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR) | |
5654 | { | |
5655 | #if defined(CONFIG_USER_ONLY) | |
5656 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5657 | #else | |
5658 | TCGv dcrn; | |
5659 | if (unlikely(!ctx->mem_idx)) { | |
5660 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5661 | return; | |
5662 | } | |
5663 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
5664 | gen_update_nip(ctx, ctx->nip - 4); | |
5665 | dcrn = tcg_const_tl(SPR(ctx->opcode)); | |
5666 | gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn); | |
5667 | tcg_temp_free(dcrn); | |
5668 | #endif | |
5669 | } | |
5670 | ||
5671 | /* mtdcr */ | |
5672 | GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR) | |
5673 | { | |
5674 | #if defined(CONFIG_USER_ONLY) | |
5675 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5676 | #else | |
5677 | TCGv dcrn; | |
5678 | if (unlikely(!ctx->mem_idx)) { | |
5679 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5680 | return; | |
5681 | } | |
5682 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
5683 | gen_update_nip(ctx, ctx->nip - 4); | |
5684 | dcrn = tcg_const_tl(SPR(ctx->opcode)); | |
5685 | gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]); | |
5686 | tcg_temp_free(dcrn); | |
5687 | #endif | |
5688 | } | |
5689 | ||
5690 | /* mfdcrx */ | |
5691 | /* XXX: not implemented on 440 ? */ | |
5692 | GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX) | |
5693 | { | |
5694 | #if defined(CONFIG_USER_ONLY) | |
5695 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5696 | #else | |
5697 | if (unlikely(!ctx->mem_idx)) { | |
5698 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5699 | return; | |
5700 | } | |
5701 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
5702 | gen_update_nip(ctx, ctx->nip - 4); | |
5703 | gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
5704 | /* Note: Rc update flag set leads to undefined state of Rc0 */ | |
5705 | #endif | |
5706 | } | |
5707 | ||
5708 | /* mtdcrx */ | |
5709 | /* XXX: not implemented on 440 ? */ | |
5710 | GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX) | |
5711 | { | |
5712 | #if defined(CONFIG_USER_ONLY) | |
5713 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5714 | #else | |
5715 | if (unlikely(!ctx->mem_idx)) { | |
5716 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); | |
5717 | return; | |
5718 | } | |
5719 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
5720 | gen_update_nip(ctx, ctx->nip - 4); | |
5721 | gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
5722 | /* Note: Rc update flag set leads to undefined state of Rc0 */ | |
5723 | #endif | |
5724 | } | |
5725 | ||
5726 | /* mfdcrux (PPC 460) : user-mode access to DCR */ | |
5727 | GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX) | |
5728 | { | |
5729 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
5730 | gen_update_nip(ctx, ctx->nip - 4); | |
5731 | gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
5732 | /* Note: Rc update flag set leads to undefined state of Rc0 */ | |
5733 | } | |
5734 | ||
5735 | /* mtdcrux (PPC 460) : user-mode access to DCR */ | |
5736 | GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX) | |
5737 | { | |
5738 | /* NIP cannot be restored if the memory exception comes from a helper */ | |
5739 | gen_update_nip(ctx, ctx->nip - 4); | |
5740 | gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
5741 | /* Note: Rc update flag set leads to undefined state of Rc0 */ | |
5742 | } | |
5743 | ||
5744 | /* dccci */ | |
5745 | GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON) | |
5746 | { | |
5747 | #if defined(CONFIG_USER_ONLY) | |
5748 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5749 | #else | |
5750 | if (unlikely(!ctx->mem_idx)) { | |
5751 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5752 | return; | |
5753 | } | |
5754 | /* interpreted as no-op */ | |
5755 | #endif | |
5756 | } | |
5757 | ||
5758 | /* dcread */ | |
5759 | GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON) | |
5760 | { | |
5761 | #if defined(CONFIG_USER_ONLY) | |
5762 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5763 | #else | |
5764 | TCGv EA, val; | |
5765 | if (unlikely(!ctx->mem_idx)) { | |
5766 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5767 | return; | |
5768 | } | |
5769 | gen_set_access_type(ctx, ACCESS_CACHE); | |
5770 | EA = tcg_temp_new(); | |
5771 | gen_addr_reg_index(ctx, EA); | |
5772 | val = tcg_temp_new(); | |
5773 | gen_qemu_ld32u(ctx, val, EA); | |
5774 | tcg_temp_free(val); | |
5775 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); | |
5776 | tcg_temp_free(EA); | |
5777 | #endif | |
5778 | } | |
5779 | ||
5780 | /* icbt */ | |
5781 | GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT) | |
5782 | { | |
5783 | /* interpreted as no-op */ | |
5784 | /* XXX: the specification says this is treated as a load by the MMU | |
5785 | * but does not generate any exception | |
5786 | */ | |
5787 | } | |
5788 | ||
5789 | /* iccci */ | |
5790 | GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON) | |
5791 | { | |
5792 | #if defined(CONFIG_USER_ONLY) | |
5793 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5794 | #else | |
5795 | if (unlikely(!ctx->mem_idx)) { | |
5796 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5797 | return; | |
5798 | } | |
5799 | /* interpreted as no-op */ | |
5800 | #endif | |
5801 | } | |
5802 | ||
5803 | /* icread */ | |
5804 | GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON) | |
5805 | { | |
5806 | #if defined(CONFIG_USER_ONLY) | |
5807 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5808 | #else | |
5809 | if (unlikely(!ctx->mem_idx)) { | |
5810 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5811 | return; | |
5812 | } | |
5813 | /* interpreted as no-op */ | |
5814 | #endif | |
5815 | } | |
5816 | ||
5817 | /* rfci (mem_idx only) */ | |
5818 | GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP) | |
5819 | { | |
5820 | #if defined(CONFIG_USER_ONLY) | |
5821 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5822 | #else | |
5823 | if (unlikely(!ctx->mem_idx)) { | |
5824 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5825 | return; | |
5826 | } | |
5827 | /* Restore CPU state */ | |
5828 | gen_helper_40x_rfci(); | |
5829 | gen_sync_exception(ctx); | |
5830 | #endif | |
5831 | } | |
5832 | ||
5833 | GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE) | |
5834 | { | |
5835 | #if defined(CONFIG_USER_ONLY) | |
5836 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5837 | #else | |
5838 | if (unlikely(!ctx->mem_idx)) { | |
5839 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5840 | return; | |
5841 | } | |
5842 | /* Restore CPU state */ | |
5843 | gen_helper_rfci(); | |
5844 | gen_sync_exception(ctx); | |
5845 | #endif | |
5846 | } | |
5847 | ||
5848 | /* BookE specific */ | |
5849 | /* XXX: not implemented on 440 ? */ | |
5850 | GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI) | |
5851 | { | |
5852 | #if defined(CONFIG_USER_ONLY) | |
5853 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5854 | #else | |
5855 | if (unlikely(!ctx->mem_idx)) { | |
5856 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5857 | return; | |
5858 | } | |
5859 | /* Restore CPU state */ | |
5860 | gen_helper_rfdi(); | |
5861 | gen_sync_exception(ctx); | |
5862 | #endif | |
5863 | } | |
5864 | ||
5865 | /* XXX: not implemented on 440 ? */ | |
5866 | GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI) | |
5867 | { | |
5868 | #if defined(CONFIG_USER_ONLY) | |
5869 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5870 | #else | |
5871 | if (unlikely(!ctx->mem_idx)) { | |
5872 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5873 | return; | |
5874 | } | |
5875 | /* Restore CPU state */ | |
5876 | gen_helper_rfmci(); | |
5877 | gen_sync_exception(ctx); | |
5878 | #endif | |
5879 | } | |
5880 | ||
5881 | /* TLB management - PowerPC 405 implementation */ | |
5882 | /* tlbre */ | |
5883 | GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB) | |
5884 | { | |
5885 | #if defined(CONFIG_USER_ONLY) | |
5886 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5887 | #else | |
5888 | if (unlikely(!ctx->mem_idx)) { | |
5889 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5890 | return; | |
5891 | } | |
5892 | switch (rB(ctx->opcode)) { | |
5893 | case 0: | |
5894 | gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
5895 | break; | |
5896 | case 1: | |
5897 | gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
5898 | break; | |
5899 | default: | |
5900 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
5901 | break; | |
5902 | } | |
5903 | #endif | |
5904 | } | |
5905 | ||
5906 | /* tlbsx - tlbsx. */ | |
5907 | GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB) | |
5908 | { | |
5909 | #if defined(CONFIG_USER_ONLY) | |
5910 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5911 | #else | |
5912 | TCGv t0; | |
5913 | if (unlikely(!ctx->mem_idx)) { | |
5914 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5915 | return; | |
5916 | } | |
5917 | t0 = tcg_temp_new(); | |
5918 | gen_addr_reg_index(ctx, t0); | |
5919 | gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0); | |
5920 | tcg_temp_free(t0); | |
5921 | if (Rc(ctx->opcode)) { | |
5922 | int l1 = gen_new_label(); | |
5923 | tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); | |
5924 | tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); | |
5925 | tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); | |
5926 | tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); | |
5927 | tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); | |
5928 | gen_set_label(l1); | |
5929 | } | |
5930 | #endif | |
5931 | } | |
5932 | ||
5933 | /* tlbwe */ | |
5934 | GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB) | |
5935 | { | |
5936 | #if defined(CONFIG_USER_ONLY) | |
5937 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5938 | #else | |
5939 | if (unlikely(!ctx->mem_idx)) { | |
5940 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5941 | return; | |
5942 | } | |
5943 | switch (rB(ctx->opcode)) { | |
5944 | case 0: | |
5945 | gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
5946 | break; | |
5947 | case 1: | |
5948 | gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
5949 | break; | |
5950 | default: | |
5951 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
5952 | break; | |
5953 | } | |
5954 | #endif | |
5955 | } | |
5956 | ||
5957 | /* TLB management - PowerPC 440 implementation */ | |
5958 | /* tlbre */ | |
5959 | GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE) | |
5960 | { | |
5961 | #if defined(CONFIG_USER_ONLY) | |
5962 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5963 | #else | |
5964 | if (unlikely(!ctx->mem_idx)) { | |
5965 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5966 | return; | |
5967 | } | |
5968 | switch (rB(ctx->opcode)) { | |
5969 | case 0: | |
5970 | case 1: | |
5971 | case 2: | |
5972 | { | |
5973 | TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); | |
5974 | gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]); | |
5975 | tcg_temp_free_i32(t0); | |
5976 | } | |
5977 | break; | |
5978 | default: | |
5979 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
5980 | break; | |
5981 | } | |
5982 | #endif | |
5983 | } | |
5984 | ||
5985 | /* tlbsx - tlbsx. */ | |
5986 | GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE) | |
5987 | { | |
5988 | #if defined(CONFIG_USER_ONLY) | |
5989 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5990 | #else | |
5991 | TCGv t0; | |
5992 | if (unlikely(!ctx->mem_idx)) { | |
5993 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
5994 | return; | |
5995 | } | |
5996 | t0 = tcg_temp_new(); | |
5997 | gen_addr_reg_index(ctx, t0); | |
5998 | gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0); | |
5999 | tcg_temp_free(t0); | |
6000 | if (Rc(ctx->opcode)) { | |
6001 | int l1 = gen_new_label(); | |
6002 | tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); | |
6003 | tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); | |
6004 | tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); | |
6005 | tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); | |
6006 | tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); | |
6007 | gen_set_label(l1); | |
6008 | } | |
6009 | #endif | |
6010 | } | |
6011 | ||
6012 | /* tlbwe */ | |
6013 | GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE) | |
6014 | { | |
6015 | #if defined(CONFIG_USER_ONLY) | |
6016 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
6017 | #else | |
6018 | if (unlikely(!ctx->mem_idx)) { | |
6019 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
6020 | return; | |
6021 | } | |
6022 | switch (rB(ctx->opcode)) { | |
6023 | case 0: | |
6024 | case 1: | |
6025 | case 2: | |
6026 | { | |
6027 | TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); | |
6028 | gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); | |
6029 | tcg_temp_free_i32(t0); | |
6030 | } | |
6031 | break; | |
6032 | default: | |
6033 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
6034 | break; | |
6035 | } | |
6036 | #endif | |
6037 | } | |
6038 | ||
6039 | /* wrtee */ | |
6040 | GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE) | |
6041 | { | |
6042 | #if defined(CONFIG_USER_ONLY) | |
6043 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
6044 | #else | |
6045 | TCGv t0; | |
6046 | if (unlikely(!ctx->mem_idx)) { | |
6047 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
6048 | return; | |
6049 | } | |
6050 | t0 = tcg_temp_new(); | |
6051 | tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); | |
6052 | tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); | |
6053 | tcg_gen_or_tl(cpu_msr, cpu_msr, t0); | |
6054 | tcg_temp_free(t0); | |
6055 | /* Stop translation to have a chance to raise an exception | |
6056 | * if we just set msr_ee to 1 | |
6057 | */ | |
6058 | gen_stop_exception(ctx); | |
6059 | #endif | |
6060 | } | |
6061 | ||
6062 | /* wrteei */ | |
6063 | GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE) | |
6064 | { | |
6065 | #if defined(CONFIG_USER_ONLY) | |
6066 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
6067 | #else | |
6068 | if (unlikely(!ctx->mem_idx)) { | |
6069 | gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); | |
6070 | return; | |
6071 | } | |
6072 | if (ctx->opcode & 0x00010000) { | |
6073 | tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); | |
6074 | /* Stop translation to have a chance to raise an exception */ | |
6075 | gen_stop_exception(ctx); | |
6076 | } else { | |
6077 | tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); | |
6078 | } | |
6079 | #endif | |
6080 | } | |
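| /* wrteei takes the new EE value from opcode bit 0x00010000, so | |
| * "wrteei 1" sets MSR[EE] and stops translation to give a pending | |
| * interrupt a chance to be taken, while "wrteei 0" simply clears MSR[EE]. | |
| */ | |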
6081 | ||
6082 | /* PowerPC 440 specific instructions */ | |
6083 | /* dlmzb */ | |
6084 | GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC) | |
6085 | { | |
6086 | TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); | |
6087 | gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], | |
6088 | cpu_gpr[rB(ctx->opcode)], t0); | |
6089 | tcg_temp_free_i32(t0); | |
6090 | } | |
6091 | ||
6092 | /* mbar replaces eieio on 440 */ | |
6093 | GEN_HANDLER(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, PPC_BOOKE) | |
6094 | { | |
6095 | /* interpreted as no-op */ | |
6096 | } | |
6097 | ||
6098 | /* msync replaces sync on 440 */ | |
6099 | GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE) | |
6100 | { | |
6101 | /* interpreted as no-op */ | |
6102 | } | |
6103 | ||
6104 | /* icbt */ | |
6105 | GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE) | |
6106 | { | |
6107 | /* interpreted as no-op */ | |
6108 | /* XXX: the specification says this is treated as a load by the MMU | |
6109 | * but does not generate any exception | |
6110 | */ | |
6111 | } | |
6112 | ||
6113 | /*** Altivec vector extension ***/ | |
6114 | /* Altivec registers moves */ | |
6115 | ||
6116 | static always_inline TCGv_ptr gen_avr_ptr(int reg) | |
6117 | { | |
6118 | TCGv_ptr r = tcg_temp_new_ptr(); | |
6119 | tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg])); | |
6120 | return r; | |
6121 | } | |
6122 | ||
6123 | #define GEN_VR_LDX(name, opc2, opc3) \ | |
6124 | GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \ | |
6125 | { \ | |
6126 | TCGv EA; \ | |
6127 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6128 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6129 | return; \ | |
6130 | } \ | |
6131 | gen_set_access_type(ctx, ACCESS_INT); \ | |
6132 | EA = tcg_temp_new(); \ | |
6133 | gen_addr_reg_index(ctx, EA); \ | |
6134 | tcg_gen_andi_tl(EA, EA, ~0xf); \ | |
6135 | if (ctx->le_mode) { \ | |
6136 | gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \ | |
6137 | tcg_gen_addi_tl(EA, EA, 8); \ | |
6138 | gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \ | |
6139 | } else { \ | |
6140 | gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \ | |
6141 | tcg_gen_addi_tl(EA, EA, 8); \ | |
6142 | gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \ | |
6143 | } \ | |
6144 | tcg_temp_free(EA); \ | |
6145 | } | |
6146 | ||
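/* Illustrative sketch, hypothetical helper: what the GEN_VR_LDX pattern above
 * amounts to once the TCG ops run.  The EA is forced to 16-byte alignment and
 * the two 64-bit halves of the vector are loaded in an order that depends on
 * the guest endianness (cpu_avrh holds the most significant half).
 */
static inline void lvx_example(uint64_t *avrh, uint64_t *avrl,
                               const uint64_t *mem, target_ulong ea, int le_mode)
{
    const uint64_t *p;

    ea &= ~(target_ulong)0xf;     /* lvx ignores the low 4 address bits */
    p = mem + (ea / 8);
    if (le_mode) {
        *avrl = p[0];             /* little-endian: low half first  */
        *avrh = p[1];
    } else {
        *avrh = p[0];             /* big-endian: high half first    */
        *avrl = p[1];
    }
}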
6147 | #define GEN_VR_STX(name, opc2, opc3) \ | |
6148 | GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \ | |
6149 | { \ | |
6150 | TCGv EA; \ | |
6151 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6152 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6153 | return; \ | |
6154 | } \ | |
6155 | gen_set_access_type(ctx, ACCESS_INT); \ | |
6156 | EA = tcg_temp_new(); \ | |
6157 | gen_addr_reg_index(ctx, EA); \ | |
6158 | tcg_gen_andi_tl(EA, EA, ~0xf); \ | |
6159 | if (ctx->le_mode) { \ | |
6160 | gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \ | |
6161 | tcg_gen_addi_tl(EA, EA, 8); \ | |
6162 | gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \ | |
6163 | } else { \ | |
6164 | gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \ | |
6165 | tcg_gen_addi_tl(EA, EA, 8); \ | |
6166 | gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \ | |
6167 | } \ | |
6168 | tcg_temp_free(EA); \ | |
6169 | } | |
6170 | ||
6171 | #define GEN_VR_LVE(name, opc2, opc3) \ | |
6172 | GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \ | |
6173 | { \ | |
6174 | TCGv EA; \ | |
6175 | TCGv_ptr rs; \ | |
6176 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6177 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6178 | return; \ | |
6179 | } \ | |
6180 | gen_set_access_type(ctx, ACCESS_INT); \ | |
6181 | EA = tcg_temp_new(); \ | |
6182 | gen_addr_reg_index(ctx, EA); \ | |
6183 | rs = gen_avr_ptr(rS(ctx->opcode)); \ | |
6184 | gen_helper_lve##name (rs, EA); \ | |
6185 | tcg_temp_free(EA); \ | |
6186 | tcg_temp_free_ptr(rs); \ | |
6187 | } | |
6188 | ||
6189 | #define GEN_VR_STVE(name, opc2, opc3) \ | |
6190 | GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \ | |
6191 | { \ | |
6192 | TCGv EA; \ | |
6193 | TCGv_ptr rs; \ | |
6194 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6195 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6196 | return; \ | |
6197 | } \ | |
6198 | gen_set_access_type(ctx, ACCESS_INT); \ | |
6199 | EA = tcg_temp_new(); \ | |
6200 | gen_addr_reg_index(ctx, EA); \ | |
6201 | rs = gen_avr_ptr(rS(ctx->opcode)); \ | |
6202 | gen_helper_stve##name (rs, EA); \ | |
6203 | tcg_temp_free(EA); \ | |
6204 | tcg_temp_free_ptr(rs); \ | |
6205 | } | |
6206 | ||
6207 | GEN_VR_LDX(lvx, 0x07, 0x03); | |
6208 | /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */ | |
6209 | GEN_VR_LDX(lvxl, 0x07, 0x0B); | |
6210 | ||
6211 | GEN_VR_LVE(bx, 0x07, 0x00); | |
6212 | GEN_VR_LVE(hx, 0x07, 0x01); | |
6213 | GEN_VR_LVE(wx, 0x07, 0x02); | |
6214 | ||
6215 | GEN_VR_STX(svx, 0x07, 0x07); | |
6216 | /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */ | |
6217 | GEN_VR_STX(svxl, 0x07, 0x0F); | |
6218 | ||
6219 | GEN_VR_STVE(bx, 0x07, 0x04); | |
6220 | GEN_VR_STVE(hx, 0x07, 0x05); | |
6221 | GEN_VR_STVE(wx, 0x07, 0x06); | |
6222 | ||
6223 | GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC) | |
6224 | { | |
6225 | TCGv_ptr rd; | |
6226 | TCGv EA; | |
6227 | if (unlikely(!ctx->altivec_enabled)) { | |
6228 | gen_exception(ctx, POWERPC_EXCP_VPU); | |
6229 | return; | |
6230 | } | |
6231 | EA = tcg_temp_new(); | |
6232 | gen_addr_reg_index(ctx, EA); | |
6233 | rd = gen_avr_ptr(rD(ctx->opcode)); | |
6234 | gen_helper_lvsl(rd, EA); | |
6235 | tcg_temp_free(EA); | |
6236 | tcg_temp_free_ptr(rd); | |
6237 | } | |
6238 | ||
6239 | GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC) | |
6240 | { | |
6241 | TCGv_ptr rd; | |
6242 | TCGv EA; | |
6243 | if (unlikely(!ctx->altivec_enabled)) { | |
6244 | gen_exception(ctx, POWERPC_EXCP_VPU); | |
6245 | return; | |
6246 | } | |
6247 | EA = tcg_temp_new(); | |
6248 | gen_addr_reg_index(ctx, EA); | |
6249 | rd = gen_avr_ptr(rD(ctx->opcode)); | |
6250 | gen_helper_lvsr(rd, EA); | |
6251 | tcg_temp_free(EA); | |
6252 | tcg_temp_free_ptr(rd); | |
6253 | } | |
6254 | ||
6255 | GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC) | |
6256 | { | |
6257 | TCGv_i32 t; | |
6258 | if (unlikely(!ctx->altivec_enabled)) { | |
6259 | gen_exception(ctx, POWERPC_EXCP_VPU); | |
6260 | return; | |
6261 | } | |
6262 | tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0); | |
6263 | t = tcg_temp_new_i32(); | |
6264 | tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr)); | |
6265 | tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t); | |
6266 | tcg_temp_free_i32(t); | |
6267 | } | |
6268 | ||
6269 | GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC) | |
6270 | { | |
6271 | TCGv_ptr p; | |
6272 | if (unlikely(!ctx->altivec_enabled)) { | |
6273 | gen_exception(ctx, POWERPC_EXCP_VPU); | |
6274 | return; | |
6275 | } | |
6276 | p = gen_avr_ptr(rD(ctx->opcode)); | |
6277 | gen_helper_mtvscr(p); | |
6278 | tcg_temp_free_ptr(p); | |
6279 | } | |
6280 | ||
6281 | /* Logical operations */ | |
6282 | #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \ | |
6283 | GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC) \ | |
6284 | { \ | |
6285 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6286 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6287 | return; \ | |
6288 | } \ | |
6289 | tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \ | |
6290 | tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \ | |
6291 | } | |
6292 | ||
6293 | GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16); | |
6294 | GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17); | |
6295 | GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18); | |
6296 | GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19); | |
6297 | GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20); | |
6298 | ||
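/* Illustrative sketch, hypothetical types: the 128-bit AltiVec logical
 * operations above are just the same 64-bit operation applied to the high
 * and low halves of the vector register pair.
 */
typedef struct { uint64_t hi, lo; } avr_pair_example;

static inline avr_pair_example vand_example(avr_pair_example a, avr_pair_example b)
{
    avr_pair_example r;
    r.hi = a.hi & b.hi;   /* corresponds to tcg_gen_and_i64 on cpu_avrh */
    r.lo = a.lo & b.lo;   /* corresponds to tcg_gen_and_i64 on cpu_avrl */
    return r;
}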
6299 | #define GEN_VXFORM(name, opc2, opc3) \ | |
6300 | GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC) \ | |
6301 | { \ | |
6302 | TCGv_ptr ra, rb, rd; \ | |
6303 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6304 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6305 | return; \ | |
6306 | } \ | |
6307 | ra = gen_avr_ptr(rA(ctx->opcode)); \ | |
6308 | rb = gen_avr_ptr(rB(ctx->opcode)); \ | |
6309 | rd = gen_avr_ptr(rD(ctx->opcode)); \ | |
6310 | gen_helper_##name (rd, ra, rb); \ | |
6311 | tcg_temp_free_ptr(ra); \ | |
6312 | tcg_temp_free_ptr(rb); \ | |
6313 | tcg_temp_free_ptr(rd); \ | |
6314 | } | |
6315 | ||
6316 | GEN_VXFORM(vaddubm, 0, 0); | |
6317 | GEN_VXFORM(vadduhm, 0, 1); | |
6318 | GEN_VXFORM(vadduwm, 0, 2); | |
6319 | GEN_VXFORM(vsububm, 0, 16); | |
6320 | GEN_VXFORM(vsubuhm, 0, 17); | |
6321 | GEN_VXFORM(vsubuwm, 0, 18); | |
6322 | GEN_VXFORM(vmaxub, 1, 0); | |
6323 | GEN_VXFORM(vmaxuh, 1, 1); | |
6324 | GEN_VXFORM(vmaxuw, 1, 2); | |
6325 | GEN_VXFORM(vmaxsb, 1, 4); | |
6326 | GEN_VXFORM(vmaxsh, 1, 5); | |
6327 | GEN_VXFORM(vmaxsw, 1, 6); | |
6328 | GEN_VXFORM(vminub, 1, 8); | |
6329 | GEN_VXFORM(vminuh, 1, 9); | |
6330 | GEN_VXFORM(vminuw, 1, 10); | |
6331 | GEN_VXFORM(vminsb, 1, 12); | |
6332 | GEN_VXFORM(vminsh, 1, 13); | |
6333 | GEN_VXFORM(vminsw, 1, 14); | |
6334 | GEN_VXFORM(vavgub, 1, 16); | |
6335 | GEN_VXFORM(vavguh, 1, 17); | |
6336 | GEN_VXFORM(vavguw, 1, 18); | |
6337 | GEN_VXFORM(vavgsb, 1, 20); | |
6338 | GEN_VXFORM(vavgsh, 1, 21); | |
6339 | GEN_VXFORM(vavgsw, 1, 22); | |
6340 | GEN_VXFORM(vmrghb, 6, 0); | |
6341 | GEN_VXFORM(vmrghh, 6, 1); | |
6342 | GEN_VXFORM(vmrghw, 6, 2); | |
6343 | GEN_VXFORM(vmrglb, 6, 4); | |
6344 | GEN_VXFORM(vmrglh, 6, 5); | |
6345 | GEN_VXFORM(vmrglw, 6, 6); | |
6346 | GEN_VXFORM(vmuloub, 4, 0); | |
6347 | GEN_VXFORM(vmulouh, 4, 1); | |
6348 | GEN_VXFORM(vmulosb, 4, 4); | |
6349 | GEN_VXFORM(vmulosh, 4, 5); | |
6350 | GEN_VXFORM(vmuleub, 4, 8); | |
6351 | GEN_VXFORM(vmuleuh, 4, 9); | |
6352 | GEN_VXFORM(vmulesb, 4, 12); | |
6353 | GEN_VXFORM(vmulesh, 4, 13); | |
6354 | GEN_VXFORM(vslb, 2, 4); | |
6355 | GEN_VXFORM(vslh, 2, 5); | |
6356 | GEN_VXFORM(vslw, 2, 6); | |
6357 | GEN_VXFORM(vsrb, 2, 8); | |
6358 | GEN_VXFORM(vsrh, 2, 9); | |
6359 | GEN_VXFORM(vsrw, 2, 10); | |
6360 | GEN_VXFORM(vsrab, 2, 12); | |
6361 | GEN_VXFORM(vsrah, 2, 13); | |
6362 | GEN_VXFORM(vsraw, 2, 14); | |
6363 | GEN_VXFORM(vslo, 6, 16); | |
6364 | GEN_VXFORM(vsro, 6, 17); | |
6365 | GEN_VXFORM(vaddcuw, 0, 6); | |
6366 | GEN_VXFORM(vsubcuw, 0, 22); | |
6367 | GEN_VXFORM(vaddubs, 0, 8); | |
6368 | GEN_VXFORM(vadduhs, 0, 9); | |
6369 | GEN_VXFORM(vadduws, 0, 10); | |
6370 | GEN_VXFORM(vaddsbs, 0, 12); | |
6371 | GEN_VXFORM(vaddshs, 0, 13); | |
6372 | GEN_VXFORM(vaddsws, 0, 14); | |
6373 | GEN_VXFORM(vsububs, 0, 24); | |
6374 | GEN_VXFORM(vsubuhs, 0, 25); | |
6375 | GEN_VXFORM(vsubuws, 0, 26); | |
6376 | GEN_VXFORM(vsubsbs, 0, 28); | |
6377 | GEN_VXFORM(vsubshs, 0, 29); | |
6378 | GEN_VXFORM(vsubsws, 0, 30); | |
6379 | GEN_VXFORM(vrlb, 2, 0); | |
6380 | GEN_VXFORM(vrlh, 2, 1); | |
6381 | GEN_VXFORM(vrlw, 2, 2); | |
6382 | GEN_VXFORM(vsl, 2, 7); | |
6383 | GEN_VXFORM(vsr, 2, 11); | |
6384 | GEN_VXFORM(vpkuhum, 7, 0); | |
6385 | GEN_VXFORM(vpkuwum, 7, 1); | |
6386 | GEN_VXFORM(vpkuhus, 7, 2); | |
6387 | GEN_VXFORM(vpkuwus, 7, 3); | |
6388 | GEN_VXFORM(vpkshus, 7, 4); | |
6389 | GEN_VXFORM(vpkswus, 7, 5); | |
6390 | GEN_VXFORM(vpkshss, 7, 6); | |
6391 | GEN_VXFORM(vpkswss, 7, 7); | |
6392 | GEN_VXFORM(vpkpx, 7, 12); | |
6393 | GEN_VXFORM(vsum4ubs, 4, 24); | |
6394 | GEN_VXFORM(vsum4sbs, 4, 28); | |
6395 | GEN_VXFORM(vsum4shs, 4, 25); | |
6396 | GEN_VXFORM(vsum2sws, 4, 26); | |
6397 | GEN_VXFORM(vsumsws, 4, 30); | |
6398 | ||
6399 | #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \ | |
6400 | GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC) \ | |
6401 | { \ | |
6402 | TCGv_ptr ra, rb, rd; \ | |
6403 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6404 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6405 | return; \ | |
6406 | } \ | |
6407 | ra = gen_avr_ptr(rA(ctx->opcode)); \ | |
6408 | rb = gen_avr_ptr(rB(ctx->opcode)); \ | |
6409 | rd = gen_avr_ptr(rD(ctx->opcode)); \ | |
6410 | gen_helper_##opname (rd, ra, rb); \ | |
6411 | tcg_temp_free_ptr(ra); \ | |
6412 | tcg_temp_free_ptr(rb); \ | |
6413 | tcg_temp_free_ptr(rd); \ | |
6414 | } | |
6415 | ||
6416 | #define GEN_VXRFORM(name, opc2, opc3) \ | |
6417 | GEN_VXRFORM1(name, name, #name, opc2, opc3) \ | |
6418 | GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4))) | |
6419 | ||
6420 | GEN_VXRFORM(vcmpequb, 3, 0) | |
6421 | GEN_VXRFORM(vcmpequh, 3, 1) | |
6422 | GEN_VXRFORM(vcmpequw, 3, 2) | |
6423 | GEN_VXRFORM(vcmpgtsb, 3, 12) | |
6424 | GEN_VXRFORM(vcmpgtsh, 3, 13) | |
6425 | GEN_VXRFORM(vcmpgtsw, 3, 14) | |
6426 | GEN_VXRFORM(vcmpgtub, 3, 8) | |
6427 | GEN_VXRFORM(vcmpgtuh, 3, 9) | |
6428 | GEN_VXRFORM(vcmpgtuw, 3, 10) | |
6429 | ||
6430 | #define GEN_VXFORM_SIMM(name, opc2, opc3) \ | |
6431 | GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC) \ | |
6432 | { \ | |
6433 | TCGv_ptr rd; \ | |
6434 | TCGv_i32 simm; \ | |
6435 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6436 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6437 | return; \ | |
6438 | } \ | |
6439 | simm = tcg_const_i32(SIMM5(ctx->opcode)); \ | |
6440 | rd = gen_avr_ptr(rD(ctx->opcode)); \ | |
6441 | gen_helper_##name (rd, simm); \ | |
6442 | tcg_temp_free_i32(simm); \ | |
6443 | tcg_temp_free_ptr(rd); \ | |
6444 | } | |
6445 | ||
6446 | GEN_VXFORM_SIMM(vspltisb, 6, 12); | |
6447 | GEN_VXFORM_SIMM(vspltish, 6, 13); | |
6448 | GEN_VXFORM_SIMM(vspltisw, 6, 14); | |
6449 | ||
6450 | #define GEN_VXFORM_NOA(name, opc2, opc3) \ | |
6451 | GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC) \ | |
6452 | { \ | |
6453 | TCGv_ptr rb, rd; \ | |
6454 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6455 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6456 | return; \ | |
6457 | } \ | |
6458 | rb = gen_avr_ptr(rB(ctx->opcode)); \ | |
6459 | rd = gen_avr_ptr(rD(ctx->opcode)); \ | |
6460 | gen_helper_##name (rd, rb); \ | |
6461 | tcg_temp_free_ptr(rb); \ | |
6462 | tcg_temp_free_ptr(rd); \ | |
6463 | } | |
6464 | ||
6465 | GEN_VXFORM_NOA(vupkhsb, 7, 8); | |
6466 | GEN_VXFORM_NOA(vupkhsh, 7, 9); | |
6467 | GEN_VXFORM_NOA(vupklsb, 7, 10); | |
6468 | GEN_VXFORM_NOA(vupklsh, 7, 11); | |
6469 | GEN_VXFORM_NOA(vupkhpx, 7, 13); | |
6470 | GEN_VXFORM_NOA(vupklpx, 7, 15); | |
6471 | GEN_VXFORM_NOA(vlogefp, 5, 7); | |
6472 | GEN_VXFORM_NOA(vrfim, 5, 8); | |
6473 | GEN_VXFORM_NOA(vrfin, 5, 9); | |
6474 | GEN_VXFORM_NOA(vrfip, 5, 10); | |
6475 | GEN_VXFORM_NOA(vrfiz, 5, 11); | |
6476 | ||
6493 | #define GEN_VXFORM_UIMM(name, opc2, opc3) \ | |
6494 | GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC) \ | |
6495 | { \ | |
6496 | TCGv_ptr rb, rd; \ | |
6497 | TCGv_i32 uimm; \ | |
6498 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6499 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6500 | return; \ | |
6501 | } \ | |
6502 | uimm = tcg_const_i32(UIMM5(ctx->opcode)); \ | |
6503 | rb = gen_avr_ptr(rB(ctx->opcode)); \ | |
6504 | rd = gen_avr_ptr(rD(ctx->opcode)); \ | |
6505 | gen_helper_##name (rd, rb, uimm); \ | |
6506 | tcg_temp_free_i32(uimm); \ | |
6507 | tcg_temp_free_ptr(rb); \ | |
6508 | tcg_temp_free_ptr(rd); \ | |
6509 | } | |
6510 | ||
6511 | GEN_VXFORM_UIMM(vspltb, 6, 8); | |
6512 | GEN_VXFORM_UIMM(vsplth, 6, 9); | |
6513 | GEN_VXFORM_UIMM(vspltw, 6, 10); | |
6514 | GEN_VXFORM_UIMM(vcfux, 5, 12); | |
6515 | GEN_VXFORM_UIMM(vcfsx, 5, 13); | |
6516 | ||
6517 | GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC) | |
6518 | { | |
6519 | TCGv_ptr ra, rb, rd; | |
6520 | TCGv_i32 sh; | |
6521 | if (unlikely(!ctx->altivec_enabled)) { | |
6522 | gen_exception(ctx, POWERPC_EXCP_VPU); | |
6523 | return; | |
6524 | } | |
6525 | ra = gen_avr_ptr(rA(ctx->opcode)); | |
6526 | rb = gen_avr_ptr(rB(ctx->opcode)); | |
6527 | rd = gen_avr_ptr(rD(ctx->opcode)); | |
6528 | sh = tcg_const_i32(VSH(ctx->opcode)); | |
6529 | gen_helper_vsldoi (rd, ra, rb, sh); | |
6530 | tcg_temp_free_ptr(ra); | |
6531 | tcg_temp_free_ptr(rb); | |
6532 | tcg_temp_free_ptr(rd); | |
6533 | tcg_temp_free_i32(sh); | |
6534 | } | |
6535 | ||
6536 | #define GEN_VAFORM_PAIRED(name0, name1, opc2) \ | |
6537 | GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC) \ | |
6538 | { \ | |
6539 | TCGv_ptr ra, rb, rc, rd; \ | |
6540 | if (unlikely(!ctx->altivec_enabled)) { \ | |
6541 | gen_exception(ctx, POWERPC_EXCP_VPU); \ | |
6542 | return; \ | |
6543 | } \ | |
6544 | ra = gen_avr_ptr(rA(ctx->opcode)); \ | |
6545 | rb = gen_avr_ptr(rB(ctx->opcode)); \ | |
6546 | rc = gen_avr_ptr(rC(ctx->opcode)); \ | |
6547 | rd = gen_avr_ptr(rD(ctx->opcode)); \ | |
6548 | if (Rc(ctx->opcode)) { \ | |
6549 | gen_helper_##name1 (rd, ra, rb, rc); \ | |
6550 | } else { \ | |
6551 | gen_helper_##name0 (rd, ra, rb, rc); \ | |
6552 | } \ | |
6553 | tcg_temp_free_ptr(ra); \ | |
6554 | tcg_temp_free_ptr(rb); \ | |
6555 | tcg_temp_free_ptr(rc); \ | |
6556 | tcg_temp_free_ptr(rd); \ | |
6557 | } | |
6558 | ||
6559 | GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16) | |
6560 | ||
6561 | GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC) | |
6562 | { | |
6563 | TCGv_ptr ra, rb, rc, rd; | |
6564 | if (unlikely(!ctx->altivec_enabled)) { | |
6565 | gen_exception(ctx, POWERPC_EXCP_VPU); | |
6566 | return; | |
6567 | } | |
6568 | ra = gen_avr_ptr(rA(ctx->opcode)); | |
6569 | rb = gen_avr_ptr(rB(ctx->opcode)); | |
6570 | rc = gen_avr_ptr(rC(ctx->opcode)); | |
6571 | rd = gen_avr_ptr(rD(ctx->opcode)); | |
6572 | gen_helper_vmladduhm(rd, ra, rb, rc); | |
6573 | tcg_temp_free_ptr(ra); | |
6574 | tcg_temp_free_ptr(rb); | |
6575 | tcg_temp_free_ptr(rc); | |
6576 | tcg_temp_free_ptr(rd); | |
6577 | } | |
6578 | ||
6579 | GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18) | |
6580 | GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19) | |
6581 | GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20) | |
6582 | GEN_VAFORM_PAIRED(vsel, vperm, 21) | |
6583 | ||
6584 | /*** SPE extension ***/ | |
6585 | /* Register moves */ | |
6586 | ||
6587 | static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) { | |
6588 | #if defined(TARGET_PPC64) | |
6589 | tcg_gen_mov_i64(t, cpu_gpr[reg]); | |
6590 | #else | |
6591 | tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]); | |
6592 | #endif | |
6593 | } | |
6594 | ||
6595 | static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) { | |
6596 | #if defined(TARGET_PPC64) | |
6597 | tcg_gen_mov_i64(cpu_gpr[reg], t); | |
6598 | #else | |
6599 | TCGv_i64 tmp = tcg_temp_new_i64(); | |
6600 | tcg_gen_trunc_i64_i32(cpu_gpr[reg], t); | |
6601 | tcg_gen_shri_i64(tmp, t, 32); | |
6602 | tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp); | |
6603 | tcg_temp_free_i64(tmp); | |
6604 | #endif | |
6605 | } | |
6606 | ||
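/* Illustrative sketch, hypothetical helpers: on 32-bit targets a 64-bit SPE
 * register is split across cpu_gpr (low word) and cpu_gprh (high word); the
 * two functions above concatenate and split exactly like this.
 */
static inline uint64_t spe_pack_example(uint32_t gprh, uint32_t gpr)
{
    return ((uint64_t)gprh << 32) | gpr;
}

static inline void spe_unpack_example(uint64_t v, uint32_t *gprh, uint32_t *gpr)
{
    *gpr  = (uint32_t)v;            /* low 32 bits  -> cpu_gpr  */
    *gprh = (uint32_t)(v >> 32);    /* high 32 bits -> cpu_gprh */
}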
6607 | #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \ | |
6608 | GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \ | |
6609 | { \ | |
6610 | if (Rc(ctx->opcode)) \ | |
6611 | gen_##name1(ctx); \ | |
6612 | else \ | |
6613 | gen_##name0(ctx); \ | |
6614 | } | |
6615 | ||
6616 | /* Handler for undefined SPE opcodes */ | |
6617 | static always_inline void gen_speundef (DisasContext *ctx) | |
6618 | { | |
6619 | gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); | |
6620 | } | |
6621 | ||
6622 | /* SPE logic */ | |
6623 | #if defined(TARGET_PPC64) | |
6624 | #define GEN_SPEOP_LOGIC2(name, tcg_op) \ | |
6625 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6626 | { \ | |
6627 | if (unlikely(!ctx->spe_enabled)) { \ | |
6628 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6629 | return; \ | |
6630 | } \ | |
6631 | tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \ | |
6632 | cpu_gpr[rB(ctx->opcode)]); \ | |
6633 | } | |
6634 | #else | |
6635 | #define GEN_SPEOP_LOGIC2(name, tcg_op) \ | |
6636 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6637 | { \ | |
6638 | if (unlikely(!ctx->spe_enabled)) { \ | |
6639 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6640 | return; \ | |
6641 | } \ | |
6642 | tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \ | |
6643 | cpu_gpr[rB(ctx->opcode)]); \ | |
6644 | tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \ | |
6645 | cpu_gprh[rB(ctx->opcode)]); \ | |
6646 | } | |
6647 | #endif | |
6648 | ||
6649 | GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl); | |
6650 | GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl); | |
6651 | GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl); | |
6652 | GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl); | |
6653 | GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl); | |
6654 | GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl); | |
6655 | GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl); | |
6656 | GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl); | |
6657 | ||
6658 | /* SPE logic immediate */ | |
6659 | #if defined(TARGET_PPC64) | |
6660 | #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \ | |
6661 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6662 | { \ | |
6663 | if (unlikely(!ctx->spe_enabled)) { \ | |
6664 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6665 | return; \ | |
6666 | } \ | |
6667 | TCGv_i32 t0 = tcg_temp_local_new_i32(); \ | |
6668 | TCGv_i32 t1 = tcg_temp_local_new_i32(); \ | |
6669 | TCGv_i64 t2 = tcg_temp_local_new_i64(); \ | |
6670 | tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \ | |
6671 | tcg_opi(t0, t0, rB(ctx->opcode)); \ | |
6672 | tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \ | |
6673 | tcg_gen_trunc_i64_i32(t1, t2); \ | |
6674 | tcg_temp_free_i64(t2); \ | |
6675 | tcg_opi(t1, t1, rB(ctx->opcode)); \ | |
6676 | tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \ | |
6677 | tcg_temp_free_i32(t0); \ | |
6678 | tcg_temp_free_i32(t1); \ | |
6679 | } | |
6680 | #else | |
6681 | #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \ | |
6682 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6683 | { \ | |
6684 | if (unlikely(!ctx->spe_enabled)) { \ | |
6685 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6686 | return; \ | |
6687 | } \ | |
6688 | tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \ | |
6689 | rB(ctx->opcode)); \ | |
6690 | tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \ | |
6691 | rB(ctx->opcode)); \ | |
6692 | } | |
6693 | #endif | |
6694 | GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32); | |
6695 | GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32); | |
6696 | GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32); | |
6697 | GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32); | |
6698 | ||
6699 | /* SPE arithmetic */ | |
6700 | #if defined(TARGET_PPC64) | |
6701 | #define GEN_SPEOP_ARITH1(name, tcg_op) \ | |
6702 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6703 | { \ | |
6704 | if (unlikely(!ctx->spe_enabled)) { \ | |
6705 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6706 | return; \ | |
6707 | } \ | |
6708 | TCGv_i32 t0 = tcg_temp_local_new_i32(); \ | |
6709 | TCGv_i32 t1 = tcg_temp_local_new_i32(); \ | |
6710 | TCGv_i64 t2 = tcg_temp_local_new_i64(); \ | |
6711 | tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \ | |
6712 | tcg_op(t0, t0); \ | |
6713 | tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \ | |
6714 | tcg_gen_trunc_i64_i32(t1, t2); \ | |
6715 | tcg_temp_free_i64(t2); \ | |
6716 | tcg_op(t1, t1); \ | |
6717 | tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \ | |
6718 | tcg_temp_free_i32(t0); \ | |
6719 | tcg_temp_free_i32(t1); \ | |
6720 | } | |
6721 | #else | |
6722 | #define GEN_SPEOP_ARITH1(name, tcg_op) \ | |
6723 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6724 | { \ | |
6725 | if (unlikely(!ctx->spe_enabled)) { \ | |
6726 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6727 | return; \ | |
6728 | } \ | |
6729 | tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \ | |
6730 | tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \ | |
6731 | } | |
6732 | #endif | |
6733 | ||
6734 | static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1) | |
6735 | { | |
6736 | int l1 = gen_new_label(); | |
6737 | int l2 = gen_new_label(); | |
6738 | ||
6739 | tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1); | |
6740 | tcg_gen_neg_i32(ret, arg1); | |
6741 | tcg_gen_br(l2); | |
6742 | gen_set_label(l1); | |
6743 | tcg_gen_mov_i32(ret, arg1); | |
6744 | gen_set_label(l2); | |
6745 | } | |
6746 | GEN_SPEOP_ARITH1(evabs, gen_op_evabs); | |
6747 | GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32); | |
6748 | GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32); | |
6749 | GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32); | |
6750 | static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1) | |
6751 | { | |
6752 | tcg_gen_addi_i32(ret, arg1, 0x8000); | |
6753 | tcg_gen_andi_i32(ret, ret, 0xFFFF0000); | |
6754 | } | |
6755 | GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw); | |
6756 | GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32); | |
6757 | GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32); | |
6758 | ||
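/* Illustrative sketch, hypothetical helper: GEN_SPEOP_ARITH1 applies a 32-bit
 * scalar operation to both halves of the 64-bit SPE register.  The evabs case
 * emitted above is equivalent to this per-element function.
 */
static inline uint32_t evabs_element_example(uint32_t x)
{
    int32_t s = (int32_t)x;
    return (s < 0) ? (uint32_t)(-s) : x;   /* matches the brcond/neg/mov sequence */
}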
6759 | #if defined(TARGET_PPC64) | |
6760 | #define GEN_SPEOP_ARITH2(name, tcg_op) \ | |
6761 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6762 | { \ | |
6763 | if (unlikely(!ctx->spe_enabled)) { \ | |
6764 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6765 | return; \ | |
6766 | } \ | |
6767 | TCGv_i32 t0 = tcg_temp_local_new_i32(); \ | |
6768 | TCGv_i32 t1 = tcg_temp_local_new_i32(); \ | |
6769 | TCGv_i32 t2 = tcg_temp_local_new_i32(); \ | |
6770 | TCGv_i64 t3 = tcg_temp_local_new_i64(); \ | |
6771 | tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \ | |
6772 | tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \ | |
6773 | tcg_op(t0, t0, t2); \ | |
6774 | tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \ | |
6775 | tcg_gen_trunc_i64_i32(t1, t3); \ | |
6776 | tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \ | |
6777 | tcg_gen_trunc_i64_i32(t2, t3); \ | |
6778 | tcg_temp_free_i64(t3); \ | |
6779 | tcg_op(t1, t1, t2); \ | |
6780 | tcg_temp_free_i32(t2); \ | |
6781 | tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \ | |
6782 | tcg_temp_free_i32(t0); \ | |
6783 | tcg_temp_free_i32(t1); \ | |
6784 | } | |
6785 | #else | |
6786 | #define GEN_SPEOP_ARITH2(name, tcg_op) \ | |
6787 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6788 | { \ | |
6789 | if (unlikely(!ctx->spe_enabled)) { \ | |
6790 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6791 | return; \ | |
6792 | } \ | |
6793 | tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \ | |
6794 | cpu_gpr[rB(ctx->opcode)]); \ | |
6795 | tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \ | |
6796 | cpu_gprh[rB(ctx->opcode)]); \ | |
6797 | } | |
6798 | #endif | |
6799 | ||
6800 | static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) | |
6801 | { | |
6802 | TCGv_i32 t0; | |
6803 | int l1, l2; | |
6804 | ||
6805 | l1 = gen_new_label(); | |
6806 | l2 = gen_new_label(); | |
6807 | t0 = tcg_temp_local_new_i32(); | |
6808 | /* No error here: 6 bits are used */ | |
6809 | tcg_gen_andi_i32(t0, arg2, 0x3F); | |
6810 | tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1); | |
6811 | tcg_gen_shr_i32(ret, arg1, t0); | |
6812 | tcg_gen_br(l2); | |
6813 | gen_set_label(l1); | |
6814 | tcg_gen_movi_i32(ret, 0); | |
6815 | gen_set_label(l2); | |
6816 | tcg_temp_free_i32(t0); | |
6817 | } | |
6818 | GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu); | |
6819 | static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) | |
6820 | { | |
6821 | TCGv_i32 t0; | |
6822 | int l1, l2; | |
6823 | ||
6824 | l1 = gen_new_label(); | |
6825 | l2 = gen_new_label(); | |
6826 | t0 = tcg_temp_local_new_i32(); | |
6827 | /* No error here: 6 bits are used */ | |
6828 | tcg_gen_andi_i32(t0, arg2, 0x3F); | |
6829 | tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1); | |
6830 | tcg_gen_sar_i32(ret, arg1, t0); | |
6831 | tcg_gen_br(l2); | |
6832 | gen_set_label(l1); | |
6833 | tcg_gen_movi_i32(ret, 0); | |
6834 | gen_set_label(l2); | |
6835 | tcg_temp_free_i32(t0); | |
6836 | } | |
6837 | GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws); | |
6838 | static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) | |
6839 | { | |
6840 | TCGv_i32 t0; | |
6841 | int l1, l2; | |
6842 | ||
6843 | l1 = gen_new_label(); | |
6844 | l2 = gen_new_label(); | |
6845 | t0 = tcg_temp_local_new_i32(); | |
6846 | /* No error here: 6 bits are used */ | |
6847 | tcg_gen_andi_i32(t0, arg2, 0x3F); | |
6848 | tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1); | |
6849 | tcg_gen_shl_i32(ret, arg1, t0); | |
6850 | tcg_gen_br(l2); | |
6851 | gen_set_label(l1); | |
6852 | tcg_gen_movi_i32(ret, 0); | |
6853 | gen_set_label(l2); | |
6854 | tcg_temp_free_i32(t0); | |
6855 | } | |
6856 | GEN_SPEOP_ARITH2(evslw, gen_op_evslw); | |
6857 | static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) | |
6858 | { | |
6859 | TCGv_i32 t0 = tcg_temp_new_i32(); | |
6860 | tcg_gen_andi_i32(t0, arg2, 0x1F); | |
6861 | tcg_gen_rotl_i32(ret, arg1, t0); | |
6862 | tcg_temp_free_i32(t0); | |
6863 | } | |
6864 | GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw); | |
6865 | static always_inline void gen_evmergehi (DisasContext *ctx) | |
6866 | { | |
6867 | if (unlikely(!ctx->spe_enabled)) { | |
6868 | gen_exception(ctx, POWERPC_EXCP_APU); | |
6869 | return; | |
6870 | } | |
6871 | #if defined(TARGET_PPC64) | |
6872 | TCGv t0 = tcg_temp_new(); | |
6873 | TCGv t1 = tcg_temp_new(); | |
6874 | tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32); | |
6875 | tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL); | |
6876 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1); | |
6877 | tcg_temp_free(t0); | |
6878 | tcg_temp_free(t1); | |
6879 | #else | |
6880 | tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]); | |
6881 | tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); | |
6882 | #endif | |
6883 | } | |
6884 | GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32); | |
6885 | static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) | |
6886 | { | |
6887 | tcg_gen_sub_i32(ret, arg2, arg1); | |
6888 | } | |
6889 | GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf); | |
6890 | ||
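/* Illustrative sketch, hypothetical helpers, of the shift semantics emitted
 * above: the shift amount field is 6 bits wide and any value >= 32 yields 0,
 * whereas the rotate (evrlw) only uses the low 5 bits.
 */
static inline uint32_t evsrwu_element_example(uint32_t x, uint32_t n)
{
    n &= 0x3F;
    return (n >= 32) ? 0 : (x >> n);
}

static inline uint32_t evrlw_element_example(uint32_t x, uint32_t n)
{
    n &= 0x1F;
    return (n == 0) ? x : ((x << n) | (x >> (32 - n)));
}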
6891 | /* SPE arithmetic immediate */ | |
6892 | #if defined(TARGET_PPC64) | |
6893 | #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \ | |
6894 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6895 | { \ | |
6896 | if (unlikely(!ctx->spe_enabled)) { \ | |
6897 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6898 | return; \ | |
6899 | } \ | |
6900 | TCGv_i32 t0 = tcg_temp_local_new_i32(); \ | |
6901 | TCGv_i32 t1 = tcg_temp_local_new_i32(); \ | |
6902 | TCGv_i64 t2 = tcg_temp_local_new_i64(); \ | |
6903 | tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \ | |
6904 | tcg_op(t0, t0, rA(ctx->opcode)); \ | |
6905 | tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \ | |
6906 | tcg_gen_trunc_i64_i32(t1, t2); \ | |
6907 | tcg_temp_free_i64(t2); \ | |
6908 | tcg_op(t1, t1, rA(ctx->opcode)); \ | |
6909 | tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \ | |
6910 | tcg_temp_free_i32(t0); \ | |
6911 | tcg_temp_free_i32(t1); \ | |
6912 | } | |
6913 | #else | |
6914 | #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \ | |
6915 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6916 | { \ | |
6917 | if (unlikely(!ctx->spe_enabled)) { \ | |
6918 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6919 | return; \ | |
6920 | } \ | |
6921 | tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ | |
6922 | rA(ctx->opcode)); \ | |
6923 | tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \ | |
6924 | rA(ctx->opcode)); \ | |
6925 | } | |
6926 | #endif | |
6927 | GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32); | |
6928 | GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32); | |
6929 | ||
6930 | /* SPE comparison */ | |
6931 | #if defined(TARGET_PPC64) | |
6932 | #define GEN_SPEOP_COMP(name, tcg_cond) \ | |
6933 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6934 | { \ | |
6935 | if (unlikely(!ctx->spe_enabled)) { \ | |
6936 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6937 | return; \ | |
6938 | } \ | |
6939 | int l1 = gen_new_label(); \ | |
6940 | int l2 = gen_new_label(); \ | |
6941 | int l3 = gen_new_label(); \ | |
6942 | int l4 = gen_new_label(); \ | |
6943 | TCGv_i32 t0 = tcg_temp_local_new_i32(); \ | |
6944 | TCGv_i32 t1 = tcg_temp_local_new_i32(); \ | |
6945 | TCGv_i64 t2 = tcg_temp_local_new_i64(); \ | |
6946 | tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \ | |
6947 | tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \ | |
6948 | tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \ | |
6949 | tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \ | |
6950 | tcg_gen_br(l2); \ | |
6951 | gen_set_label(l1); \ | |
6952 | tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \ | |
6953 | CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \ | |
6954 | gen_set_label(l2); \ | |
6955 | tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \ | |
6956 | tcg_gen_trunc_i64_i32(t0, t2); \ | |
6957 | tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \ | |
6958 | tcg_gen_trunc_i64_i32(t1, t2); \ | |
6959 | tcg_temp_free_i64(t2); \ | |
6960 | tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \ | |
6961 | tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \ | |
6962 | ~(CRF_CH | CRF_CH_AND_CL)); \ | |
6963 | tcg_gen_br(l4); \ | |
6964 | gen_set_label(l3); \ | |
6965 | tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \ | |
6966 | CRF_CH | CRF_CH_OR_CL); \ | |
6967 | gen_set_label(l4); \ | |
6968 | tcg_temp_free_i32(t0); \ | |
6969 | tcg_temp_free_i32(t1); \ | |
6970 | } | |
6971 | #else | |
6972 | #define GEN_SPEOP_COMP(name, tcg_cond) \ | |
6973 | static always_inline void gen_##name (DisasContext *ctx) \ | |
6974 | { \ | |
6975 | if (unlikely(!ctx->spe_enabled)) { \ | |
6976 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
6977 | return; \ | |
6978 | } \ | |
6979 | int l1 = gen_new_label(); \ | |
6980 | int l2 = gen_new_label(); \ | |
6981 | int l3 = gen_new_label(); \ | |
6982 | int l4 = gen_new_label(); \ | |
6983 | \ | |
6984 | tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \ | |
6985 | cpu_gpr[rB(ctx->opcode)], l1); \ | |
6986 | tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \ | |
6987 | tcg_gen_br(l2); \ | |
6988 | gen_set_label(l1); \ | |
6989 | tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \ | |
6990 | CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \ | |
6991 | gen_set_label(l2); \ | |
6992 | tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \ | |
6993 | cpu_gprh[rB(ctx->opcode)], l3); \ | |
6994 | tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \ | |
6995 | ~(CRF_CH | CRF_CH_AND_CL)); \ | |
6996 | tcg_gen_br(l4); \ | |
6997 | gen_set_label(l3); \ | |
6998 | tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \ | |
6999 | CRF_CH | CRF_CH_OR_CL); \ | |
7000 | gen_set_label(l4); \ | |
7001 | } | |
7002 | #endif | |
7003 | GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU); | |
7004 | GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT); | |
7005 | GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU); | |
7006 | GEN_SPEOP_COMP(evcmplts, TCG_COND_LT); | |
7007 | GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ); | |
7008 | ||
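/* Illustrative sketch, hypothetical helper, of the CR field the comparison
 * macro builds: one bit for the high-element result, one for the low-element
 * result, plus their OR and AND (the CRF_* masks are the ones used above).
 */
static inline uint32_t spe_compare_crf_example(int high_true, int low_true)
{
    uint32_t crf = 0;

    if (high_true)
        crf |= CRF_CH;
    if (low_true)
        crf |= CRF_CL;
    if (high_true || low_true)
        crf |= CRF_CH_OR_CL;
    if (high_true && low_true)
        crf |= CRF_CH_AND_CL;
    return crf;
}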
7009 | /* SPE misc */ | |
7010 | static always_inline void gen_brinc (DisasContext *ctx) | |
7011 | { | |
7012 | /* Note: brinc is usable even if SPE is disabled */ | |
7013 | gen_helper_brinc(cpu_gpr[rD(ctx->opcode)], | |
7014 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
7015 | } | |
7016 | static always_inline void gen_evmergelo (DisasContext *ctx) | |
7017 | { | |
7018 | if (unlikely(!ctx->spe_enabled)) { | |
7019 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7020 | return; | |
7021 | } | |
7022 | #if defined(TARGET_PPC64) | |
7023 | TCGv t0 = tcg_temp_new(); | |
7024 | TCGv t1 = tcg_temp_new(); | |
7025 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL); | |
7026 | tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32); | |
7027 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1); | |
7028 | tcg_temp_free(t0); | |
7029 | tcg_temp_free(t1); | |
7030 | #else | |
7031 | tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
7032 | tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
7033 | #endif | |
7034 | } | |
7035 | static always_inline void gen_evmergehilo (DisasContext *ctx) | |
7036 | { | |
7037 | if (unlikely(!ctx->spe_enabled)) { | |
7038 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7039 | return; | |
7040 | } | |
7041 | #if defined(TARGET_PPC64) | |
7042 | TCGv t0 = tcg_temp_new(); | |
7043 | TCGv t1 = tcg_temp_new(); | |
7044 | tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL); | |
7045 | tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL); | |
7046 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1); | |
7047 | tcg_temp_free(t0); | |
7048 | tcg_temp_free(t1); | |
7049 | #else | |
7050 | tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
7051 | tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); | |
7052 | #endif | |
7053 | } | |
7054 | static always_inline void gen_evmergelohi (DisasContext *ctx) | |
7055 | { | |
7056 | if (unlikely(!ctx->spe_enabled)) { | |
7057 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7058 | return; | |
7059 | } | |
7060 | #if defined(TARGET_PPC64) | |
7061 | TCGv t0 = tcg_temp_new(); | |
7062 | TCGv t1 = tcg_temp_new(); | |
7063 | tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32); | |
7064 | tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32); | |
7065 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1); | |
7066 | tcg_temp_free(t0); | |
7067 | tcg_temp_free(t1); | |
7068 | #else | |
7069 | tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]); | |
7070 | tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
7071 | #endif | |
7072 | } | |
7073 | static always_inline void gen_evsplati (DisasContext *ctx) | |
7074 | { | |
7075 | uint64_t imm = (uint32_t)(((int32_t)(rA(ctx->opcode) << 27)) >> 27); | |
7076 | ||
7077 | #if defined(TARGET_PPC64) | |
7078 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm); | |
7079 | #else | |
7080 | tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm); | |
7081 | tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm); | |
7082 | #endif | |
7083 | } | |
7084 | static always_inline void gen_evsplatfi (DisasContext *ctx) | |
7085 | { | |
7086 | uint64_t imm = (uint64_t)rA(ctx->opcode) << 27; | |
7087 | ||
7088 | #if defined(TARGET_PPC64) | |
7089 | tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm); | |
7090 | #else | |
7091 | tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm); | |
7092 | tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm); | |
7093 | #endif | |
7094 | } | |
7095 | ||
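/* Illustrative sketch, hypothetical helpers: evsplati sign-extends the 5-bit
 * immediate taken from the rA field, evsplatfi places it in the top 5 bits of
 * each word; both handlers then replicate the word into both register halves.
 */
static inline uint32_t evsplati_word_example(uint32_t si5)
{
    return (uint32_t)(((int32_t)(si5 << 27)) >> 27);   /* sign-extend 5 bits  */
}

static inline uint32_t evsplatfi_word_example(uint32_t si5)
{
    return si5 << 27;                                  /* 5 bits || 27 zeros  */
}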
7096 | static always_inline void gen_evsel (DisasContext *ctx) | |
7097 | { | |
7098 | int l1 = gen_new_label(); | |
7099 | int l2 = gen_new_label(); | |
7100 | int l3 = gen_new_label(); | |
7101 | int l4 = gen_new_label(); | |
7102 | TCGv_i32 t0 = tcg_temp_local_new_i32(); | |
7103 | #if defined(TARGET_PPC64) | |
7104 | TCGv t1 = tcg_temp_local_new(); | |
7105 | TCGv t2 = tcg_temp_local_new(); | |
7106 | #endif | |
7107 | tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3); | |
7108 | tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1); | |
7109 | #if defined(TARGET_PPC64) | |
7110 | tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL); | |
7111 | #else | |
7112 | tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); | |
7113 | #endif | |
7114 | tcg_gen_br(l2); | |
7115 | gen_set_label(l1); | |
7116 | #if defined(TARGET_PPC64) | |
7117 | tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL); | |
7118 | #else | |
7119 | tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]); | |
7120 | #endif | |
7121 | gen_set_label(l2); | |
7122 | tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2); | |
7123 | tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3); | |
7124 | #if defined(TARGET_PPC64) | |
7125 | tcg_gen_andi_tl(t2, cpu_gpr[rA(ctx->opcode)], 0x00000000FFFFFFFFULL); | |
7126 | #else | |
7127 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); | |
7128 | #endif | |
7129 | tcg_gen_br(l4); | |
7130 | gen_set_label(l3); | |
7131 | #if defined(TARGET_PPC64) | |
7132 | tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFULL); | |
7133 | #else | |
7134 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); | |
7135 | #endif | |
7136 | gen_set_label(l4); | |
7137 | tcg_temp_free_i32(t0); | |
7138 | #if defined(TARGET_PPC64) | |
7139 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2); | |
7140 | tcg_temp_free(t1); | |
7141 | tcg_temp_free(t2); | |
7142 | #endif | |
7143 | } | |
7144 | GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE) | |
7145 | { | |
7146 | gen_evsel(ctx); | |
7147 | } | |
7148 | GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE) | |
7149 | { | |
7150 | gen_evsel(ctx); | |
7151 | } | |
7152 | GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE) | |
7153 | { | |
7154 | gen_evsel(ctx); | |
7155 | } | |
7156 | GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE) | |
7157 | { | |
7158 | gen_evsel(ctx); | |
7159 | } | |
7160 | ||
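/* Illustrative sketch, hypothetical helper: evsel uses two bits of the chosen
 * CR field to select, independently for the high and the low word, whether
 * the result comes from rA or from rB, as the branches above do.
 */
static inline uint64_t evsel_example(uint32_t crf_bits, uint64_t ra, uint64_t rb)
{
    uint64_t hi = (crf_bits & (1 << 3)) ? (ra & 0xFFFFFFFF00000000ULL)
                                        : (rb & 0xFFFFFFFF00000000ULL);
    uint64_t lo = (crf_bits & (1 << 2)) ? (ra & 0x00000000FFFFFFFFULL)
                                        : (rb & 0x00000000FFFFFFFFULL);
    return hi | lo;
}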
7161 | GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); //// | |
7162 | GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE); | |
7163 | GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); //// | |
7164 | GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE); | |
7165 | GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); //// | |
7166 | GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); //// | |
7167 | GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); //// | |
7168 | GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); // | |
7169 | GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); //// | |
7170 | GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); //// | |
7171 | GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); //// | |
7172 | GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); //// | |
7173 | GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); //// | |
7174 | GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); //// | |
7175 | GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); //// | |
7176 | GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE); | |
7177 | GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); //// | |
7178 | GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE); | |
7179 | GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); // | |
7180 | GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE); | |
7181 | GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); //// | |
7182 | GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); //// | |
7183 | GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); //// | |
7184 | GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); //// | |
7185 | GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); //// | |
7186 | ||
7187 | /* SPE load and stores */ | |
7188 | static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, TCGv EA, int sh) | |
7189 | { | |
7190 | target_ulong uimm = rB(ctx->opcode); | |
7191 | ||
7192 | if (rA(ctx->opcode) == 0) { | |
7193 | tcg_gen_movi_tl(EA, uimm << sh); | |
7194 | } else { | |
7195 | tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh); | |
7196 | #if defined(TARGET_PPC64) | |
7197 | if (!ctx->sf_mode) { | |
7198 | tcg_gen_ext32u_tl(EA, EA); | |
7199 | } | |
7200 | #endif | |
7201 | } | |
7202 | } | |
7203 | ||
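/* Illustrative sketch, hypothetical helper: SPE loads and stores encode their
 * offset as a 5-bit unsigned immediate in the rB field, scaled by the access
 * size (sh is 1, 2 or 3 for the halfword, word and doubleword forms below).
 */
static inline target_ulong spe_imm_ea_example(target_ulong base, uint32_t uimm5, int sh)
{
    return base + ((target_ulong)uimm5 << sh);   /* rA == 0 means base == 0 */
}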
7204 | static always_inline void gen_op_evldd(DisasContext *ctx, TCGv addr) | |
7205 | { | |
7206 | #if defined(TARGET_PPC64) | |
7207 | gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr); | |
7208 | #else | |
7209 | TCGv_i64 t0 = tcg_temp_new_i64(); | |
7210 | gen_qemu_ld64(ctx, t0, addr); | |
7211 | tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0); | |
7212 | tcg_gen_shri_i64(t0, t0, 32); | |
7213 | tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0); | |
7214 | tcg_temp_free_i64(t0); | |
7215 | #endif | |
7216 | } | |
7217 | ||
7218 | static always_inline void gen_op_evldw(DisasContext *ctx, TCGv addr) | |
7219 | { | |
7220 | #if defined(TARGET_PPC64) | |
7221 | TCGv t0 = tcg_temp_new(); | |
7222 | gen_qemu_ld32u(ctx, t0, addr); | |
7223 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
7224 | gen_addr_add(ctx, addr, addr, 4); | |
7225 | gen_qemu_ld32u(ctx, t0, addr); | |
7226 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7227 | tcg_temp_free(t0); | |
7228 | #else | |
7229 | gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr); | |
7230 | gen_addr_add(ctx, addr, addr, 4); | |
7231 | gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr); | |
7232 | #endif | |
7233 | } | |
7234 | ||
7235 | static always_inline void gen_op_evldh(DisasContext *ctx, TCGv addr) | |
7236 | { | |
7237 | TCGv t0 = tcg_temp_new(); | |
7238 | #if defined(TARGET_PPC64) | |
7239 | gen_qemu_ld16u(ctx, t0, addr); | |
7240 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
7241 | gen_addr_add(ctx, addr, addr, 2); | |
7242 | gen_qemu_ld16u(ctx, t0, addr); | |
7243 | tcg_gen_shli_tl(t0, t0, 32); | |
7244 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7245 | gen_addr_add(ctx, addr, addr, 2); | |
7246 | gen_qemu_ld16u(ctx, t0, addr); | |
7247 | tcg_gen_shli_tl(t0, t0, 16); | |
7248 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7249 | gen_addr_add(ctx, addr, addr, 2); | |
7250 | gen_qemu_ld16u(ctx, t0, addr); | |
7251 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7252 | #else | |
7253 | gen_qemu_ld16u(ctx, t0, addr); | |
7254 | tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
7255 | gen_addr_add(ctx, addr, addr, 2); | |
7256 | gen_qemu_ld16u(ctx, t0, addr); | |
7257 | tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); | |
7258 | gen_addr_add(ctx, addr, addr, 2); | |
7259 | gen_qemu_ld16u(ctx, t0, addr); | |
7260 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16); | |
7261 | gen_addr_add(ctx, addr, addr, 2); | |
7262 | gen_qemu_ld16u(ctx, t0, addr); | |
7263 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7264 | #endif | |
7265 | tcg_temp_free(t0); | |
7266 | } | |
7267 | ||
7268 | static always_inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr) | |
7269 | { | |
7270 | TCGv t0 = tcg_temp_new(); | |
7271 | gen_qemu_ld16u(ctx, t0, addr); | |
7272 | #if defined(TARGET_PPC64) | |
7273 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
7274 | tcg_gen_shli_tl(t0, t0, 16); | |
7275 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7276 | #else | |
7277 | tcg_gen_shli_tl(t0, t0, 16); | |
7278 | tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
7279 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
7280 | #endif | |
7281 | tcg_temp_free(t0); | |
7282 | } | |
7283 | ||
7284 | static always_inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr) | |
7285 | { | |
7286 | TCGv t0 = tcg_temp_new(); | |
7287 | gen_qemu_ld16u(ctx, t0, addr); | |
7288 | #if defined(TARGET_PPC64) | |
7289 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
7290 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7291 | #else | |
7292 | tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
7293 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
7294 | #endif | |
7295 | tcg_temp_free(t0); | |
7296 | } | |
7297 | ||
7298 | static always_inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr) | |
7299 | { | |
7300 | TCGv t0 = tcg_temp_new(); | |
7301 | gen_qemu_ld16s(ctx, t0, addr); | |
7302 | #if defined(TARGET_PPC64) | |
7303 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
7304 | tcg_gen_ext32u_tl(t0, t0); | |
7305 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7306 | #else | |
7307 | tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
7308 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
7309 | #endif | |
7310 | tcg_temp_free(t0); | |
7311 | } | |
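/* Illustrative sketch, hypothetical helper, of the halfword-splat loads
 * above: a single halfword is loaded and replicated into both words of the
 * 64-bit register, in the upper (even) or lower (odd) halfword position,
 * zero- or sign-extended as the mnemonic indicates.  evlhhesplat is shown.
 */
static inline uint64_t evlhhesplat_example(uint16_t h)
{
    uint32_t word = (uint32_t)h << 16;        /* halfword in the even position */
    return ((uint64_t)word << 32) | word;     /* replicated into both words    */
}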
7312 | ||
7313 | static always_inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr) | |
7314 | { | |
7315 | TCGv t0 = tcg_temp_new(); | |
7316 | #if defined(TARGET_PPC64) | |
7317 | gen_qemu_ld16u(ctx, t0, addr); | |
7318 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
7319 | gen_addr_add(ctx, addr, addr, 2); | |
7320 | gen_qemu_ld16u(ctx, t0, addr); | |
7321 | tcg_gen_shli_tl(t0, t0, 16); | |
7322 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7323 | #else | |
7324 | gen_qemu_ld16u(ctx, t0, addr); | |
7325 | tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
7326 | gen_addr_add(ctx, addr, addr, 2); | |
7327 | gen_qemu_ld16u(ctx, t0, addr); | |
7328 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16); | |
7329 | #endif | |
7330 | tcg_temp_free(t0); | |
7331 | } | |
7332 | ||
7333 | static always_inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr) | |
7334 | { | |
7335 | #if defined(TARGET_PPC64) | |
7336 | TCGv t0 = tcg_temp_new(); | |
7337 | gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr); | |
7338 | gen_addr_add(ctx, addr, addr, 2); | |
7339 | gen_qemu_ld16u(ctx, t0, addr); | |
7340 | tcg_gen_shli_tl(t0, t0, 32); | |
7341 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7342 | tcg_temp_free(t0); | |
7343 | #else | |
7344 | gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr); | |
7345 | gen_addr_add(ctx, addr, addr, 2); | |
7346 | gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr); | |
7347 | #endif | |
7348 | } | |
7349 | ||
7350 | static always_inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr) | |
7351 | { | |
7352 | #if defined(TARGET_PPC64) | |
7353 | TCGv t0 = tcg_temp_new(); | |
7354 | gen_qemu_ld16s(ctx, t0, addr); | |
7355 | tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
7356 | gen_addr_add(ctx, addr, addr, 2); | |
7357 | gen_qemu_ld16s(ctx, t0, addr); | |
7358 | tcg_gen_shli_tl(t0, t0, 32); | |
7359 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7360 | tcg_temp_free(t0); | |
7361 | #else | |
7362 | gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr); | |
7363 | gen_addr_add(ctx, addr, addr, 2); | |
7364 | gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr); | |
7365 | #endif | |
7366 | } | |
7367 | ||
7368 | static always_inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr) | |
7369 | { | |
7370 | TCGv t0 = tcg_temp_new(); | |
7371 | gen_qemu_ld32u(ctx, t0, addr); | |
7372 | #if defined(TARGET_PPC64) | |
7373 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); | |
7374 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7375 | #else | |
7376 | tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0); | |
7377 | tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); | |
7378 | #endif | |
7379 | tcg_temp_free(t0); | |
7380 | } | |
7381 | ||
7382 | static always_inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr) | |
7383 | { | |
7384 | TCGv t0 = tcg_temp_new(); | |
7385 | #if defined(TARGET_PPC64) | |
7386 | gen_qemu_ld16u(ctx, t0, addr); | |
7387 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); | |
7388 | tcg_gen_shli_tl(t0, t0, 32); | |
7389 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7390 | gen_addr_add(ctx, addr, addr, 2); | |
7391 | gen_qemu_ld16u(ctx, t0, addr); | |
7392 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7393 | tcg_gen_shli_tl(t0, t0, 16); | |
7394 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7395 | #else | |
7396 | gen_qemu_ld16u(ctx, t0, addr); | |
7397 | tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); | |
7398 | tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); | |
7399 | gen_addr_add(ctx, addr, addr, 2); | |
7400 | gen_qemu_ld16u(ctx, t0, addr); | |
7401 | tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16); | |
7402 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); | |
7403 | #endif | |
7404 | tcg_temp_free(t0); | |
7405 | } | |
7406 | ||
7407 | static always_inline void gen_op_evstdd(DisasContext *ctx, TCGv addr) | |
7408 | { | |
7409 | #if defined(TARGET_PPC64) | |
7410 | gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr); | |
7411 | #else | |
7412 | TCGv_i64 t0 = tcg_temp_new_i64(); | |
7413 | tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]); | |
7414 | gen_qemu_st64(ctx, t0, addr); | |
7415 | tcg_temp_free_i64(t0); | |
7416 | #endif | |
7417 | } | |
7418 | ||
7419 | static always_inline void gen_op_evstdw(DisasContext *ctx, TCGv addr) | |
7420 | { | |
7421 | #if defined(TARGET_PPC64) | |
7422 | TCGv t0 = tcg_temp_new(); | |
7423 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
7424 | gen_qemu_st32(ctx, t0, addr); | |
7425 | tcg_temp_free(t0); | |
7426 | #else | |
7427 | gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr); | |
7428 | #endif | |
7429 | gen_addr_add(ctx, addr, addr, 4); | |
7430 | gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr); | |
7431 | } | |
7432 | ||
7433 | static always_inline void gen_op_evstdh(DisasContext *ctx, TCGv addr) | |
7434 | { | |
7435 | TCGv t0 = tcg_temp_new(); | |
7436 | #if defined(TARGET_PPC64) | |
7437 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48); | |
7438 | #else | |
7439 | tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16); | |
7440 | #endif | |
7441 | gen_qemu_st16(ctx, t0, addr); | |
7442 | gen_addr_add(ctx, addr, addr, 2); | |
7443 | #if defined(TARGET_PPC64) | |
7444 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
7445 | gen_qemu_st16(ctx, t0, addr); | |
7446 | #else | |
7447 | gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr); | |
7448 | #endif | |
7449 | gen_addr_add(ctx, addr, addr, 2); | |
7450 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16); | |
7451 | gen_qemu_st16(ctx, t0, addr); | |
7452 | tcg_temp_free(t0); | |
7453 | gen_addr_add(ctx, addr, addr, 2); | |
7454 | gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr); | |
7455 | } | |
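/* Illustrative sketch, hypothetical helper: evstdh stores the four halfwords
 * of the 64-bit register in most-significant-first order, which is what the
 * sequence of shifts and 16-bit stores above produces.
 */
static inline void evstdh_example(uint64_t reg, uint16_t out[4])
{
    out[0] = (uint16_t)(reg >> 48);
    out[1] = (uint16_t)(reg >> 32);
    out[2] = (uint16_t)(reg >> 16);
    out[3] = (uint16_t)reg;
}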
7456 | ||
7457 | static always_inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr) | |
7458 | { | |
7459 | TCGv t0 = tcg_temp_new(); | |
7460 | #if defined(TARGET_PPC64) | |
7461 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48); | |
7462 | #else | |
7463 | tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16); | |
7464 | #endif | |
7465 | gen_qemu_st16(ctx, t0, addr); | |
7466 | gen_addr_add(ctx, addr, addr, 2); | |
7467 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16); | |
7468 | gen_qemu_st16(ctx, t0, addr); | |
7469 | tcg_temp_free(t0); | |
7470 | } | |
7471 | ||
7472 | static always_inline void gen_op_evstwho(DisasContext *ctx, TCGv addr) | |
7473 | { | |
7474 | #if defined(TARGET_PPC64) | |
7475 | TCGv t0 = tcg_temp_new(); | |
7476 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
7477 | gen_qemu_st16(ctx, t0, addr); | |
7478 | tcg_temp_free(t0); | |
7479 | #else | |
7480 | gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr); | |
7481 | #endif | |
7482 | gen_addr_add(ctx, addr, addr, 2); | |
7483 | gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr); | |
7484 | } | |
7485 | ||
7486 | static always_inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr) | |
7487 | { | |
7488 | #if defined(TARGET_PPC64) | |
7489 | TCGv t0 = tcg_temp_new(); | |
7490 | tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32); | |
7491 | gen_qemu_st32(ctx, t0, addr); | |
7492 | tcg_temp_free(t0); | |
7493 | #else | |
7494 | gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr); | |
7495 | #endif | |
7496 | } | |
7497 | ||
7498 | static always_inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr) | |
7499 | { | |
7500 | gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr); | |
7501 | } | |
7502 | ||
7503 | #define GEN_SPEOP_LDST(name, opc2, sh) \ | |
7504 | GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE) \ | |
7505 | { \ | |
7506 | TCGv t0; \ | |
7507 | if (unlikely(!ctx->spe_enabled)) { \ | |
7508 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7509 | return; \ | |
7510 | } \ | |
7511 | gen_set_access_type(ctx, ACCESS_INT); \ | |
7512 | t0 = tcg_temp_new(); \ | |
7513 | if (Rc(ctx->opcode)) { \ | |
7514 | gen_addr_spe_imm_index(ctx, t0, sh); \ | |
7515 | } else { \ | |
7516 | gen_addr_reg_index(ctx, t0); \ | |
7517 | } \ | |
7518 | gen_op_##name(ctx, t0); \ | |
7519 | tcg_temp_free(t0); \ | |
7520 | } | |
7521 | ||
7522 | GEN_SPEOP_LDST(evldd, 0x00, 3); | |
7523 | GEN_SPEOP_LDST(evldw, 0x01, 3); | |
7524 | GEN_SPEOP_LDST(evldh, 0x02, 3); | |
7525 | GEN_SPEOP_LDST(evlhhesplat, 0x04, 1); | |
7526 | GEN_SPEOP_LDST(evlhhousplat, 0x06, 1); | |
7527 | GEN_SPEOP_LDST(evlhhossplat, 0x07, 1); | |
7528 | GEN_SPEOP_LDST(evlwhe, 0x08, 2); | |
7529 | GEN_SPEOP_LDST(evlwhou, 0x0A, 2); | |
7530 | GEN_SPEOP_LDST(evlwhos, 0x0B, 2); | |
7531 | GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2); | |
7532 | GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2); | |
7533 | ||
7534 | GEN_SPEOP_LDST(evstdd, 0x10, 3); | |
7535 | GEN_SPEOP_LDST(evstdw, 0x11, 3); | |
7536 | GEN_SPEOP_LDST(evstdh, 0x12, 3); | |
7537 | GEN_SPEOP_LDST(evstwhe, 0x18, 2); | |
7538 | GEN_SPEOP_LDST(evstwho, 0x1A, 2); | |
7539 | GEN_SPEOP_LDST(evstwwe, 0x1C, 2); | |
7540 | GEN_SPEOP_LDST(evstwwo, 0x1E, 2); | |
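| /* For reference, a sketch of what one instantiation of GEN_SPEOP_LDST above | |
|  * expands to, e.g. GEN_SPEOP_LDST(evstwwo, 0x1E, 2); the handler prototype | |
|  * itself comes from GEN_HANDLER, which is defined earlier in this file. */ | |
| #if 0 | |
| GEN_HANDLER(evstwwo, 0x04, 0x1E, 0x0C, 0x00000000, PPC_SPE) | |
| { | |
| TCGv t0; | |
| if (unlikely(!ctx->spe_enabled)) { | |
| gen_exception(ctx, POWERPC_EXCP_APU); | |
| return; | |
| } | |
| gen_set_access_type(ctx, ACCESS_INT); | |
| t0 = tcg_temp_new(); | |
| if (Rc(ctx->opcode)) { | |
| gen_addr_spe_imm_index(ctx, t0, 2); | |
| } else { | |
| gen_addr_reg_index(ctx, t0); | |
| } | |
| gen_op_evstwwo(ctx, t0); | |
| tcg_temp_free(t0); | |
| } | |
| #endif | |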
7541 | ||
7542 | /* Multiply and add - TODO */ | |
7543 | #if 0 | |
7544 | GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE); | |
7545 | GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE); | |
7546 | GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE); | |
7547 | GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE); | |
7548 | GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE); | |
7549 | GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE); | |
7550 | GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE); | |
7551 | GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE); | |
7552 | GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE); | |
7553 | GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE); | |
7554 | GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE); | |
7555 | GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE); | |
7556 | ||
7557 | GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE); | |
7558 | GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE); | |
7559 | GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE); | |
7560 | GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE); | |
7561 | GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE); | |
7562 | GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE); | |
7563 | GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE); | |
7564 | GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE); | |
7565 | GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE); | |
7566 | GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE); | |
7567 | GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE); | |
7568 | GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE); | |
7569 | GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE); | |
7570 | GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE); | |
7571 | ||
7572 | GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE); | |
7573 | GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE); | |
7574 | GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE); | |
7575 | GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE); | |
7576 | GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE); | |
7577 | GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE); | |
7578 | ||
7579 | GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE); | |
7580 | GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE); | |
7581 | GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE); | |
7582 | GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE); | |
7583 | GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE); | |
7584 | GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE); | |
7585 | GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE); | |
7586 | GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE); | |
7587 | GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE); | |
7588 | GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE); | |
7589 | GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE); | |
7590 | GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE); | |
7591 | ||
7592 | GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE); | |
7593 | GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE); | |
7594 | GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE); | |
7595 | GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE); | |
7596 | GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE); | |
7597 | ||
7598 | GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE); | |
7599 | GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE); | |
7600 | GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE); | |
7601 | GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE); | |
7602 | GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE); | |
7603 | GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE); | |
7604 | GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE); | |
7605 | GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE); | |
7606 | GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE); | |
7607 | GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE); | |
7609 | GEN_SPE(evmhogumian, evmhogsmian, 0x16, 0x16, 0x00000000, PPC_SPE); | |
7609 | GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE); | |
7610 | ||
7611 | GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE); | |
7612 | GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE); | |
7613 | GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE); | |
7614 | GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE); | |
7615 | GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE); | |
7616 | #endif | |
7617 | ||
7618 | /*** SPE floating-point extension ***/ | |
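| /* The macros below come in two flavours, selected on TARGET_PPC64.  The | |
|  * suffix encodes <result width>_<operand width> in bits, e.g. CONV_64_32 | |
|  * takes a 32-bit operand from rB and produces a 64-bit result in rD; | |
|  * ARITH2 denotes two-operand arithmetic and COMP a comparison that writes | |
|  * a CR field. */ | |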
7619 | #if defined(TARGET_PPC64) | |
7620 | #define GEN_SPEFPUOP_CONV_32_32(name) \ | |
7621 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7622 | { \ | |
7623 | TCGv_i32 t0; \ | |
7624 | TCGv t1; \ | |
7625 | t0 = tcg_temp_new_i32(); \ | |
7626 | tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \ | |
7627 | gen_helper_##name(t0, t0); \ | |
7628 | t1 = tcg_temp_new(); \ | |
7629 | tcg_gen_extu_i32_tl(t1, t0); \ | |
7630 | tcg_temp_free_i32(t0); \ | |
7631 | tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \ | |
7632 | 0xFFFFFFFF00000000ULL); \ | |
7633 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \ | |
7634 | tcg_temp_free(t1); \ | |
7635 | } | |
7636 | #define GEN_SPEFPUOP_CONV_32_64(name) \ | |
7637 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7638 | { \ | |
7639 | TCGv_i32 t0; \ | |
7640 | TCGv t1; \ | |
7641 | t0 = tcg_temp_new_i32(); \ | |
7642 | gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \ | |
7643 | t1 = tcg_temp_new(); \ | |
7644 | tcg_gen_extu_i32_tl(t1, t0); \ | |
7645 | tcg_temp_free_i32(t0); \ | |
7646 | tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \ | |
7647 | 0xFFFFFFFF00000000ULL); \ | |
7648 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \ | |
7649 | tcg_temp_free(t1); \ | |
7650 | } | |
7651 | #define GEN_SPEFPUOP_CONV_64_32(name) \ | |
7652 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7653 | { \ | |
7654 | TCGv_i32 t0 = tcg_temp_new_i32(); \ | |
7655 | tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \ | |
7656 | gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \ | |
7657 | tcg_temp_free_i32(t0); \ | |
7658 | } | |
7659 | #define GEN_SPEFPUOP_CONV_64_64(name) \ | |
7660 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7661 | { \ | |
7662 | gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \ | |
7663 | } | |
7664 | #define GEN_SPEFPUOP_ARITH2_32_32(name) \ | |
7665 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7666 | { \ | |
7667 | TCGv_i32 t0, t1; \ | |
7668 | TCGv t2; \ | |
7669 | if (unlikely(!ctx->spe_enabled)) { \ | |
7670 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7671 | return; \ | |
7672 | } \ | |
7673 | t0 = tcg_temp_new_i32(); \ | |
7674 | t1 = tcg_temp_new_i32(); \ | |
7675 | tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \ | |
7676 | tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \ | |
7677 | gen_helper_##name(t0, t0, t1); \ | |
7678 | tcg_temp_free_i32(t1); \ | |
7679 | t2 = tcg_temp_new(); \ | |
7680 | tcg_gen_extu_i32_tl(t2, t0); \ | |
7681 | tcg_temp_free_i32(t0); \ | |
7682 | tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \ | |
7683 | 0xFFFFFFFF00000000ULL); \ | |
7684 | tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \ | |
7685 | tcg_temp_free(t2); \ | |
7686 | } | |
7687 | #define GEN_SPEFPUOP_ARITH2_64_64(name) \ | |
7688 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7689 | { \ | |
7690 | if (unlikely(!ctx->spe_enabled)) { \ | |
7691 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7692 | return; \ | |
7693 | } \ | |
7694 | gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \ | |
7695 | cpu_gpr[rB(ctx->opcode)]); \ | |
7696 | } | |
7697 | #define GEN_SPEFPUOP_COMP_32(name) \ | |
7698 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7699 | { \ | |
7700 | TCGv_i32 t0, t1; \ | |
7701 | if (unlikely(!ctx->spe_enabled)) { \ | |
7702 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7703 | return; \ | |
7704 | } \ | |
7705 | t0 = tcg_temp_new_i32(); \ | |
7706 | t1 = tcg_temp_new_i32(); \ | |
7707 | tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \ | |
7708 | tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \ | |
7709 | gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \ | |
7710 | tcg_temp_free_i32(t0); \ | |
7711 | tcg_temp_free_i32(t1); \ | |
7712 | } | |
7713 | #define GEN_SPEFPUOP_COMP_64(name) \ | |
7714 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7715 | { \ | |
7716 | if (unlikely(!ctx->spe_enabled)) { \ | |
7717 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7718 | return; \ | |
7719 | } \ | |
7720 | gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \ | |
7721 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \ | |
7722 | } | |
7723 | #else | |
7724 | #define GEN_SPEFPUOP_CONV_32_32(name) \ | |
7725 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7726 | { \ | |
7727 | gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \ | |
7728 | } | |
7729 | #define GEN_SPEFPUOP_CONV_32_64(name) \ | |
7730 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7731 | { \ | |
7732 | TCGv_i64 t0 = tcg_temp_new_i64(); \ | |
7733 | gen_load_gpr64(t0, rB(ctx->opcode)); \ | |
7734 | gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \ | |
7735 | tcg_temp_free_i64(t0); \ | |
7736 | } | |
7737 | #define GEN_SPEFPUOP_CONV_64_32(name) \ | |
7738 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7739 | { \ | |
7740 | TCGv_i64 t0 = tcg_temp_new_i64(); \ | |
7741 | gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \ | |
7742 | gen_store_gpr64(rD(ctx->opcode), t0); \ | |
7743 | tcg_temp_free_i64(t0); \ | |
7744 | } | |
7745 | #define GEN_SPEFPUOP_CONV_64_64(name) \ | |
7746 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7747 | { \ | |
7748 | TCGv_i64 t0 = tcg_temp_new_i64(); \ | |
7749 | gen_load_gpr64(t0, rB(ctx->opcode)); \ | |
7750 | gen_helper_##name(t0, t0); \ | |
7751 | gen_store_gpr64(rD(ctx->opcode), t0); \ | |
7752 | tcg_temp_free_i64(t0); \ | |
7753 | } | |
7754 | #define GEN_SPEFPUOP_ARITH2_32_32(name) \ | |
7755 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7756 | { \ | |
7757 | if (unlikely(!ctx->spe_enabled)) { \ | |
7758 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7759 | return; \ | |
7760 | } \ | |
7761 | gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \ | |
7762 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \ | |
7763 | } | |
7764 | #define GEN_SPEFPUOP_ARITH2_64_64(name) \ | |
7765 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7766 | { \ | |
7767 | TCGv_i64 t0, t1; \ | |
7768 | if (unlikely(!ctx->spe_enabled)) { \ | |
7769 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7770 | return; \ | |
7771 | } \ | |
7772 | t0 = tcg_temp_new_i64(); \ | |
7773 | t1 = tcg_temp_new_i64(); \ | |
7774 | gen_load_gpr64(t0, rA(ctx->opcode)); \ | |
7775 | gen_load_gpr64(t1, rB(ctx->opcode)); \ | |
7776 | gen_helper_##name(t0, t0, t1); \ | |
7777 | gen_store_gpr64(rD(ctx->opcode), t0); \ | |
7778 | tcg_temp_free_i64(t0); \ | |
7779 | tcg_temp_free_i64(t1); \ | |
7780 | } | |
7781 | #define GEN_SPEFPUOP_COMP_32(name) \ | |
7782 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7783 | { \ | |
7784 | if (unlikely(!ctx->spe_enabled)) { \ | |
7785 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7786 | return; \ | |
7787 | } \ | |
7788 | gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \ | |
7789 | cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \ | |
7790 | } | |
7791 | #define GEN_SPEFPUOP_COMP_64(name) \ | |
7792 | static always_inline void gen_##name (DisasContext *ctx) \ | |
7793 | { \ | |
7794 | TCGv_i64 t0, t1; \ | |
7795 | if (unlikely(!ctx->spe_enabled)) { \ | |
7796 | gen_exception(ctx, POWERPC_EXCP_APU); \ | |
7797 | return; \ | |
7798 | } \ | |
7799 | t0 = tcg_temp_new_i64(); \ | |
7800 | t1 = tcg_temp_new_i64(); \ | |
7801 | gen_load_gpr64(t0, rA(ctx->opcode)); \ | |
7802 | gen_load_gpr64(t1, rB(ctx->opcode)); \ | |
7803 | gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \ | |
7804 | tcg_temp_free_i64(t0); \ | |
7805 | tcg_temp_free_i64(t1); \ | |
7806 | } | |
7807 | #endif | |
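| /* For reference, a sketch of what GEN_SPEFPUOP_ARITH2_64_64(evfsadd) below | |
|  * expands to on a 32-bit target, where the 64-bit SPE operands are packed | |
|  * and unpacked from the cpu_gpr/cpu_gprh halves by gen_load_gpr64 and | |
|  * gen_store_gpr64 (defined earlier in this file): */ | |
| #if 0 | |
| static always_inline void gen_evfsadd (DisasContext *ctx) | |
| { | |
| TCGv_i64 t0, t1; | |
| if (unlikely(!ctx->spe_enabled)) { | |
| gen_exception(ctx, POWERPC_EXCP_APU); | |
| return; | |
| } | |
| t0 = tcg_temp_new_i64(); | |
| t1 = tcg_temp_new_i64(); | |
| gen_load_gpr64(t0, rA(ctx->opcode)); | |
| gen_load_gpr64(t1, rB(ctx->opcode)); | |
| gen_helper_evfsadd(t0, t0, t1); | |
| gen_store_gpr64(rD(ctx->opcode), t0); | |
| tcg_temp_free_i64(t0); | |
| tcg_temp_free_i64(t1); | |
| } | |
| #endif | |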
7808 | ||
7809 | /* Single precision floating-point vector operations */ | |
7810 | /* Arithmetic */ | |
7811 | GEN_SPEFPUOP_ARITH2_64_64(evfsadd); | |
7812 | GEN_SPEFPUOP_ARITH2_64_64(evfssub); | |
7813 | GEN_SPEFPUOP_ARITH2_64_64(evfsmul); | |
7814 | GEN_SPEFPUOP_ARITH2_64_64(evfsdiv); | |
7815 | static always_inline void gen_evfsabs (DisasContext *ctx) | |
7816 | { | |
7817 | if (unlikely(!ctx->spe_enabled)) { | |
7818 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7819 | return; | |
7820 | } | |
7821 | #if defined(TARGET_PPC64) | |
7822 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL); | |
7823 | #else | |
7824 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000); | |
7825 | tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000); | |
7826 | #endif | |
7827 | } | |
7828 | static always_inline void gen_evfsnabs (DisasContext *ctx) | |
7829 | { | |
7830 | if (unlikely(!ctx->spe_enabled)) { | |
7831 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7832 | return; | |
7833 | } | |
7834 | #if defined(TARGET_PPC64) | |
7835 | tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL); | |
7836 | #else | |
7837 | tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); | |
7838 | tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); | |
7839 | #endif | |
7840 | } | |
7841 | static always_inline void gen_evfsneg (DisasContext *ctx) | |
7842 | { | |
7843 | if (unlikely(!ctx->spe_enabled)) { | |
7844 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7845 | return; | |
7846 | } | |
7847 | #if defined(TARGET_PPC64) | |
7848 | tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL); | |
7849 | #else | |
7850 | tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); | |
7851 | tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); | |
7852 | #endif | |
7853 | } | |
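| /* evfsabs, evfsnabs and evfsneg only clear, set or flip the sign bit of | |
|  * each 32-bit element directly in the GPR pair, without calling a helper, | |
|  * so no status flags are updated here.  For example, evfsneg applied to | |
|  * the pair 0x3F800000/0xBF800000 (1.0f, -1.0f) yields 0xBF800000/0x3F800000 | |
|  * (-1.0f, 1.0f). */ | |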
7854 | ||
7855 | /* Conversion */ | |
7856 | GEN_SPEFPUOP_CONV_64_64(evfscfui); | |
7857 | GEN_SPEFPUOP_CONV_64_64(evfscfsi); | |
7858 | GEN_SPEFPUOP_CONV_64_64(evfscfuf); | |
7859 | GEN_SPEFPUOP_CONV_64_64(evfscfsf); | |
7860 | GEN_SPEFPUOP_CONV_64_64(evfsctui); | |
7861 | GEN_SPEFPUOP_CONV_64_64(evfsctsi); | |
7862 | GEN_SPEFPUOP_CONV_64_64(evfsctuf); | |
7863 | GEN_SPEFPUOP_CONV_64_64(evfsctsf); | |
7864 | GEN_SPEFPUOP_CONV_64_64(evfsctuiz); | |
7865 | GEN_SPEFPUOP_CONV_64_64(evfsctsiz); | |
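| /* Mnemonic convention for the conversions above and below: "cf" = convert | |
|  * from, "ct" = convert to, "u"/"s" = unsigned/signed, "i"/"f" = integer/ | |
|  * fractional, a trailing "z" = round toward zero, and in the efd forms a | |
|  * trailing "d" marks a 64-bit integer operand. */ | |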
7866 | ||
7867 | /* Comparison */ | |
7868 | GEN_SPEFPUOP_COMP_64(evfscmpgt); | |
7869 | GEN_SPEFPUOP_COMP_64(evfscmplt); | |
7870 | GEN_SPEFPUOP_COMP_64(evfscmpeq); | |
7871 | GEN_SPEFPUOP_COMP_64(evfststgt); | |
7872 | GEN_SPEFPUOP_COMP_64(evfststlt); | |
7873 | GEN_SPEFPUOP_COMP_64(evfststeq); | |
7874 | ||
7875 | /* Opcode definitions */ | |
7876 | GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); // | |
7877 | GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); // | |
7878 | GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); // | |
7879 | GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); // | |
7880 | GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); // | |
7881 | GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); // | |
7882 | GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); // | |
7883 | GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); // | |
7884 | GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); // | |
7885 | GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); // | |
7886 | GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); // | |
7887 | GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); // | |
7888 | GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); // | |
7889 | GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); // | |
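| /* Each GEN_SPE line above registers a pair of handlers that share one | |
|  * opc2/opc3 decode slot; the hex mask is presumably the invalid-bits mask | |
|  * matched against handler->inval in the translation loop below, and | |
|  * PPC_SPEFPU is the feature flag.  GEN_SPE itself is defined earlier in | |
|  * this file. */ | |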
7890 | ||
7891 | /* Single precision floating-point operations */ | |
7892 | /* Arithmetic */ | |
7893 | GEN_SPEFPUOP_ARITH2_32_32(efsadd); | |
7894 | GEN_SPEFPUOP_ARITH2_32_32(efssub); | |
7895 | GEN_SPEFPUOP_ARITH2_32_32(efsmul); | |
7896 | GEN_SPEFPUOP_ARITH2_32_32(efsdiv); | |
7897 | static always_inline void gen_efsabs (DisasContext *ctx) | |
7898 | { | |
7899 | if (unlikely(!ctx->spe_enabled)) { | |
7900 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7901 | return; | |
7902 | } | |
7903 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL); | |
7904 | } | |
7905 | static always_inline void gen_efsnabs (DisasContext *ctx) | |
7906 | { | |
7907 | if (unlikely(!ctx->spe_enabled)) { | |
7908 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7909 | return; | |
7910 | } | |
7911 | tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); | |
7912 | } | |
7913 | static always_inline void gen_efsneg (DisasContext *ctx) | |
7914 | { | |
7915 | if (unlikely(!ctx->spe_enabled)) { | |
7916 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7917 | return; | |
7918 | } | |
7919 | tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); | |
7920 | } | |
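| /* Like the vector forms above, efsabs/efsnabs/efsneg are pure sign-bit | |
|  * operations, but they only touch bit 31 of rA, i.e. the scalar | |
|  * single-precision value held in the low word. */ | |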
7921 | ||
7922 | /* Conversion */ | |
7923 | GEN_SPEFPUOP_CONV_32_32(efscfui); | |
7924 | GEN_SPEFPUOP_CONV_32_32(efscfsi); | |
7925 | GEN_SPEFPUOP_CONV_32_32(efscfuf); | |
7926 | GEN_SPEFPUOP_CONV_32_32(efscfsf); | |
7927 | GEN_SPEFPUOP_CONV_32_32(efsctui); | |
7928 | GEN_SPEFPUOP_CONV_32_32(efsctsi); | |
7929 | GEN_SPEFPUOP_CONV_32_32(efsctuf); | |
7930 | GEN_SPEFPUOP_CONV_32_32(efsctsf); | |
7931 | GEN_SPEFPUOP_CONV_32_32(efsctuiz); | |
7932 | GEN_SPEFPUOP_CONV_32_32(efsctsiz); | |
7933 | GEN_SPEFPUOP_CONV_32_64(efscfd); | |
7934 | ||
7935 | /* Comparison */ | |
7936 | GEN_SPEFPUOP_COMP_32(efscmpgt); | |
7937 | GEN_SPEFPUOP_COMP_32(efscmplt); | |
7938 | GEN_SPEFPUOP_COMP_32(efscmpeq); | |
7939 | GEN_SPEFPUOP_COMP_32(efststgt); | |
7940 | GEN_SPEFPUOP_COMP_32(efststlt); | |
7941 | GEN_SPEFPUOP_COMP_32(efststeq); | |
7942 | ||
7943 | /* Opcode definitions */ | |
7944 | GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); // | |
7945 | GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); // | |
7946 | GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); // | |
7947 | GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); // | |
7948 | GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); // | |
7949 | GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); // | |
7950 | GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); // | |
7951 | GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); // | |
7952 | GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); // | |
7953 | GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); // | |
7954 | GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); // | |
7955 | GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); // | |
7956 | GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); // | |
7957 | GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); // | |
7958 | ||
7959 | /* Double precision floating-point operations */ | |
7960 | /* Arithmetic */ | |
7961 | GEN_SPEFPUOP_ARITH2_64_64(efdadd); | |
7962 | GEN_SPEFPUOP_ARITH2_64_64(efdsub); | |
7963 | GEN_SPEFPUOP_ARITH2_64_64(efdmul); | |
7964 | GEN_SPEFPUOP_ARITH2_64_64(efddiv); | |
7965 | static always_inline void gen_efdabs (DisasContext *ctx) | |
7966 | { | |
7967 | if (unlikely(!ctx->spe_enabled)) { | |
7968 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7969 | return; | |
7970 | } | |
7971 | #if defined(TARGET_PPC64) | |
7972 | tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL); | |
7973 | #else | |
7974 | tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000); | |
7975 | #endif | |
7976 | } | |
7977 | static always_inline void gen_efdnabs (DisasContext *ctx) | |
7978 | { | |
7979 | if (unlikely(!ctx->spe_enabled)) { | |
7980 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7981 | return; | |
7982 | } | |
7983 | #if defined(TARGET_PPC64) | |
7984 | tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL); | |
7985 | #else | |
7986 | tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); | |
7987 | #endif | |
7988 | } | |
7989 | static always_inline void gen_efdneg (DisasContext *ctx) | |
7990 | { | |
7991 | if (unlikely(!ctx->spe_enabled)) { | |
7992 | gen_exception(ctx, POWERPC_EXCP_APU); | |
7993 | return; | |
7994 | } | |
7995 | #if defined(TARGET_PPC64) | |
7996 | tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL); | |
7997 | #else | |
7998 | tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); | |
7999 | #endif | |
8000 | } | |
8001 | ||
8002 | /* Conversion */ | |
8003 | GEN_SPEFPUOP_CONV_64_32(efdcfui); | |
8004 | GEN_SPEFPUOP_CONV_64_32(efdcfsi); | |
8005 | GEN_SPEFPUOP_CONV_64_32(efdcfuf); | |
8006 | GEN_SPEFPUOP_CONV_64_32(efdcfsf); | |
8007 | GEN_SPEFPUOP_CONV_32_64(efdctui); | |
8008 | GEN_SPEFPUOP_CONV_32_64(efdctsi); | |
8009 | GEN_SPEFPUOP_CONV_32_64(efdctuf); | |
8010 | GEN_SPEFPUOP_CONV_32_64(efdctsf); | |
8011 | GEN_SPEFPUOP_CONV_32_64(efdctuiz); | |
8012 | GEN_SPEFPUOP_CONV_32_64(efdctsiz); | |
8013 | GEN_SPEFPUOP_CONV_64_32(efdcfs); | |
8014 | GEN_SPEFPUOP_CONV_64_64(efdcfuid); | |
8015 | GEN_SPEFPUOP_CONV_64_64(efdcfsid); | |
8016 | GEN_SPEFPUOP_CONV_64_64(efdctuidz); | |
8017 | GEN_SPEFPUOP_CONV_64_64(efdctsidz); | |
8018 | ||
8019 | /* Comparison */ | |
8020 | GEN_SPEFPUOP_COMP_64(efdcmpgt); | |
8021 | GEN_SPEFPUOP_COMP_64(efdcmplt); | |
8022 | GEN_SPEFPUOP_COMP_64(efdcmpeq); | |
8023 | GEN_SPEFPUOP_COMP_64(efdtstgt); | |
8024 | GEN_SPEFPUOP_COMP_64(efdtstlt); | |
8025 | GEN_SPEFPUOP_COMP_64(efdtsteq); | |
8026 | ||
8027 | /* Opcode definitions */ | |
8028 | GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); // | |
8029 | GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8030 | GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); // | |
8031 | GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); // | |
8032 | GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); // | |
8033 | GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8034 | GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); // | |
8035 | GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); // | |
8036 | GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8037 | GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8038 | GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8039 | GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8040 | GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8041 | GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); // | |
8042 | GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); // | |
8043 | GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); // | |
8044 | ||
8045 | /* End opcode list */ | |
8046 | GEN_OPCODE_MARK(end); | |
8047 | ||
8048 | #include "translate_init.c" | |
8049 | #include "helper_regs.h" | |
8050 | ||
8051 | /*****************************************************************************/ | |
8052 | /* Misc PowerPC helpers */ | |
8053 | void cpu_dump_state (CPUState *env, FILE *f, | |
8054 | int (*cpu_fprintf)(FILE *f, const char *fmt, ...), | |
8055 | int flags) | |
8056 | { | |
8057 | #define RGPL 4 | |
8058 | #define RFPL 4 | |
8059 | ||
8060 | int i; | |
8061 | ||
8062 | cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n", | |
8063 | env->nip, env->lr, env->ctr, env->xer); | |
8064 | cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n", | |
8065 | env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx); | |
8066 | #if !defined(NO_TIMER_DUMP) | |
8067 | cpu_fprintf(f, "TB %08x %08x " | |
8068 | #if !defined(CONFIG_USER_ONLY) | |
8069 | "DECR %08x" | |
8070 | #endif | |
8071 | "\n", | |
8072 | cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) | |
8073 | #if !defined(CONFIG_USER_ONLY) | |
8074 | , cpu_ppc_load_decr(env) | |
8075 | #endif | |
8076 | ); | |
8077 | #endif | |
8078 | for (i = 0; i < 32; i++) { | |
8079 | if ((i & (RGPL - 1)) == 0) | |
8080 | cpu_fprintf(f, "GPR%02d", i); | |
8081 | cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i)); | |
8082 | if ((i & (RGPL - 1)) == (RGPL - 1)) | |
8083 | cpu_fprintf(f, "\n"); | |
8084 | } | |
8085 | cpu_fprintf(f, "CR "); | |
8086 | for (i = 0; i < 8; i++) | |
8087 | cpu_fprintf(f, "%01x", env->crf[i]); | |
8088 | cpu_fprintf(f, " ["); | |
8089 | for (i = 0; i < 8; i++) { | |
8090 | char a = '-'; | |
8091 | if (env->crf[i] & 0x08) | |
8092 | a = 'L'; | |
8093 | else if (env->crf[i] & 0x04) | |
8094 | a = 'G'; | |
8095 | else if (env->crf[i] & 0x02) | |
8096 | a = 'E'; | |
8097 | cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); | |
8098 | } | |
8099 | cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve); | |
8100 | for (i = 0; i < 32; i++) { | |
8101 | if ((i & (RFPL - 1)) == 0) | |
8102 | cpu_fprintf(f, "FPR%02d", i); | |
8103 | cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); | |
8104 | if ((i & (RFPL - 1)) == (RFPL - 1)) | |
8105 | cpu_fprintf(f, "\n"); | |
8106 | } | |
8107 | cpu_fprintf(f, "FPSCR %08x\n", env->fpscr); | |
8108 | #if !defined(CONFIG_USER_ONLY) | |
8109 | cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n", | |
8110 | env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1); | |
8111 | #endif | |
8112 | ||
8113 | #undef RGPL | |
8114 | #undef RFPL | |
8115 | } | |
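| /* A minimal usage sketch: fprintf matches the cpu_fprintf callback type, | |
|  * so the register state can be dumped to stderr from a debugging hook as | |
|  * follows. */ | |
| #if 0 | |
| cpu_dump_state(env, stderr, fprintf, 0); | |
| #endif | |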
8116 | ||
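| /* cpu_dump_statistics only produces output when the file is built with | |
|  * DO_PPC_STATISTICS defined; in that configuration each handler's count | |
|  * field is incremented in the translation loop further down and the table | |
|  * walk here prints a per-opcode total. */ | |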
8117 | void cpu_dump_statistics (CPUState *env, FILE *f, | |
8118 | int (*cpu_fprintf)(FILE *f, const char *fmt, ...), | |
8119 | int flags) | |
8120 | { | |
8121 | #if defined(DO_PPC_STATISTICS) | |
8122 | opc_handler_t **t1, **t2, **t3, *handler; | |
8123 | int op1, op2, op3; | |
8124 | ||
8125 | t1 = env->opcodes; | |
8126 | for (op1 = 0; op1 < 64; op1++) { | |
8127 | handler = t1[op1]; | |
8128 | if (is_indirect_opcode(handler)) { | |
8129 | t2 = ind_table(handler); | |
8130 | for (op2 = 0; op2 < 32; op2++) { | |
8131 | handler = t2[op2]; | |
8132 | if (is_indirect_opcode(handler)) { | |
8133 | t3 = ind_table(handler); | |
8134 | for (op3 = 0; op3 < 32; op3++) { | |
8135 | handler = t3[op3]; | |
8136 | if (handler->count == 0) | |
8137 | continue; | |
8138 | cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " | |
8139 | "%016llx %lld\n", | |
8140 | op1, op2, op3, op1, (op3 << 5) | op2, | |
8141 | handler->oname, | |
8142 | handler->count, handler->count); | |
8143 | } | |
8144 | } else { | |
8145 | if (handler->count == 0) | |
8146 | continue; | |
8147 | cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " | |
8148 | "%016llx %lld\n", | |
8149 | op1, op2, op1, op2, handler->oname, | |
8150 | handler->count, handler->count); | |
8151 | } | |
8152 | } | |
8153 | } else { | |
8154 | if (handler->count == 0) | |
8155 | continue; | |
8156 | cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n", | |
8157 | op1, op1, handler->oname, | |
8158 | handler->count, handler->count); | |
8159 | } | |
8160 | } | |
8161 | #endif | |
8162 | } | |
8163 | ||
8164 | /*****************************************************************************/ | |
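| /* Main translation loop: decode guest instructions starting at tb->pc and | |
|  * emit TCG ops until an exception is raised, a page boundary is crossed, | |
|  * the op buffer fills up or the instruction/single-step limits are hit. | |
|  * When search_pc is non-zero the block is being retranslated and the | |
|  * gen_opc_pc/gen_opc_instr_start/gen_opc_icount tables are filled in so | |
|  * that gen_pc_load() below can recover the guest nip for a given op index | |
|  * after a fault. */ | |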
8165 | static always_inline void gen_intermediate_code_internal (CPUState *env, | |
8166 | TranslationBlock *tb, | |
8167 | int search_pc) | |
8168 | { | |
8169 | DisasContext ctx, *ctxp = &ctx; | |
8170 | opc_handler_t **table, *handler; | |
8171 | target_ulong pc_start; | |
8172 | uint16_t *gen_opc_end; | |
8173 | CPUBreakpoint *bp; | |
8174 | int j, lj = -1; | |
8175 | int num_insns; | |
8176 | int max_insns; | |
8177 | ||
8178 | pc_start = tb->pc; | |
8179 | gen_opc_end = gen_opc_buf + OPC_MAX_SIZE; | |
8180 | ctx.nip = pc_start; | |
8181 | ctx.tb = tb; | |
8182 | ctx.exception = POWERPC_EXCP_NONE; | |
8183 | ctx.spr_cb = env->spr_cb; | |
8184 | ctx.mem_idx = env->mmu_idx; | |
8185 | ctx.access_type = -1; | |
8186 | ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0; | |
8187 | #if defined(TARGET_PPC64) | |
8188 | ctx.sf_mode = msr_sf; | |
8189 | #endif | |
8190 | ctx.fpu_enabled = msr_fp; | |
8191 | if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) | |
8192 | ctx.spe_enabled = msr_spe; | |
8193 | else | |
8194 | ctx.spe_enabled = 0; | |
8195 | if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) | |
8196 | ctx.altivec_enabled = msr_vr; | |
8197 | else | |
8198 | ctx.altivec_enabled = 0; | |
8199 | if ((env->flags & POWERPC_FLAG_SE) && msr_se) | |
8200 | ctx.singlestep_enabled = CPU_SINGLE_STEP; | |
8201 | else | |
8202 | ctx.singlestep_enabled = 0; | |
8203 | if ((env->flags & POWERPC_FLAG_BE) && msr_be) | |
8204 | ctx.singlestep_enabled |= CPU_BRANCH_STEP; | |
8205 | if (unlikely(env->singlestep_enabled)) | |
8206 | ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP; | |
8207 | #if defined (DO_SINGLE_STEP) && 0 | |
8208 | /* Single step trace mode */ | |
8209 | msr_se = 1; | |
8210 | #endif | |
8211 | num_insns = 0; | |
8212 | max_insns = tb->cflags & CF_COUNT_MASK; | |
8213 | if (max_insns == 0) | |
8214 | max_insns = CF_COUNT_MASK; | |
8215 | ||
8216 | gen_icount_start(); | |
8217 | /* Set env in case of segfault during code fetch */ | |
8218 | while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) { | |
8219 | if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) { | |
8220 | TAILQ_FOREACH(bp, &env->breakpoints, entry) { | |
8221 | if (bp->pc == ctx.nip) { | |
8222 | gen_debug_exception(ctxp); | |
8223 | break; | |
8224 | } | |
8225 | } | |
8226 | } | |
8227 | if (unlikely(search_pc)) { | |
8228 | j = gen_opc_ptr - gen_opc_buf; | |
8229 | if (lj < j) { | |
8230 | lj++; | |
8231 | while (lj < j) | |
8232 | gen_opc_instr_start[lj++] = 0; | |
8233 | gen_opc_pc[lj] = ctx.nip; | |
8234 | gen_opc_instr_start[lj] = 1; | |
8235 | gen_opc_icount[lj] = num_insns; | |
8236 | } | |
8237 | } | |
8238 | LOG_DISAS("----------------\n"); | |
8239 | LOG_DISAS("nip=" ADDRX " super=%d ir=%d\n", | |
8240 | ctx.nip, ctx.mem_idx, (int)msr_ir); | |
8241 | if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) | |
8242 | gen_io_start(); | |
8243 | if (unlikely(ctx.le_mode)) { | |
8244 | ctx.opcode = bswap32(ldl_code(ctx.nip)); | |
8245 | } else { | |
8246 | ctx.opcode = ldl_code(ctx.nip); | |
8247 | } | |
8248 | LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n", | |
8249 | ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode), | |
8250 | opc3(ctx.opcode), ctx.le_mode ? "little" : "big"); | |
8251 | ctx.nip += 4; | |
8252 | table = env->opcodes; | |
8253 | num_insns++; | |
8254 | handler = table[opc1(ctx.opcode)]; | |
8255 | if (is_indirect_opcode(handler)) { | |
8256 | table = ind_table(handler); | |
8257 | handler = table[opc2(ctx.opcode)]; | |
8258 | if (is_indirect_opcode(handler)) { | |
8259 | table = ind_table(handler); | |
8260 | handler = table[opc3(ctx.opcode)]; | |
8261 | } | |
8262 | } | |
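| /* handler now refers to the opc_handler_t selected by opc1 (and, for | |
|  * indirect opcodes, opc2/opc3) */ | |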
8263 | /* Is opcode *REALLY* valid ? */ | |
8264 | if (unlikely(handler->handler == &gen_invalid)) { | |
8265 | if (qemu_log_enabled()) { | |
8266 | qemu_log("invalid/unsupported opcode: " | |
8267 | "%02x - %02x - %02x (%08x) " ADDRX " %d\n", | |
8268 | opc1(ctx.opcode), opc2(ctx.opcode), | |
8269 | opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir); | |
8270 | } else { | |
8271 | printf("invalid/unsupported opcode: " | |
8272 | "%02x - %02x - %02x (%08x) " ADDRX " %d\n", | |
8273 | opc1(ctx.opcode), opc2(ctx.opcode), | |
8274 | opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir); | |
8275 | } | |
8276 | } else { | |
8277 | if (unlikely((ctx.opcode & handler->inval) != 0)) { | |
8278 | if (qemu_log_enabled()) { | |
8279 | qemu_log("invalid bits: %08x for opcode: " | |
8280 | "%02x - %02x - %02x (%08x) " ADDRX "\n", | |
8281 | ctx.opcode & handler->inval, opc1(ctx.opcode), | |
8282 | opc2(ctx.opcode), opc3(ctx.opcode), | |
8283 | ctx.opcode, ctx.nip - 4); | |
8284 | } else { | |
8285 | printf("invalid bits: %08x for opcode: " | |
8286 | "%02x - %02x - %02x (%08x) " ADDRX "\n", | |
8287 | ctx.opcode & handler->inval, opc1(ctx.opcode), | |
8288 | opc2(ctx.opcode), opc3(ctx.opcode), | |
8289 | ctx.opcode, ctx.nip - 4); | |
8290 | } | |
8291 | gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL); | |
8292 | break; | |
8293 | } | |
8294 | } | |
8295 | (*(handler->handler))(&ctx); | |
8296 | #if defined(DO_PPC_STATISTICS) | |
8297 | handler->count++; | |
8298 | #endif | |
8299 | /* Check trace mode exceptions */ | |
8300 | if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP && | |
8301 | (ctx.nip <= 0x100 || ctx.nip > 0xF00) && | |
8302 | ctx.exception != POWERPC_SYSCALL && | |
8303 | ctx.exception != POWERPC_EXCP_TRAP && | |
8304 | ctx.exception != POWERPC_EXCP_BRANCH)) { | |
8305 | gen_exception(ctxp, POWERPC_EXCP_TRACE); | |
8306 | } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) || | |
8307 | (env->singlestep_enabled) || | |
8308 | num_insns >= max_insns)) { | |
8309 | /* if we reach a page boundary or are single stepping, stop | |
8310 | * generation | |
8311 | */ | |
8312 | break; | |
8313 | } | |
8314 | #if defined (DO_SINGLE_STEP) | |
8315 | break; | |
8316 | #endif | |
8317 | } | |
8318 | if (tb->cflags & CF_LAST_IO) | |
8319 | gen_io_end(); | |
8320 | if (ctx.exception == POWERPC_EXCP_NONE) { | |
8321 | gen_goto_tb(&ctx, 0, ctx.nip); | |
8322 | } else if (ctx.exception != POWERPC_EXCP_BRANCH) { | |
8323 | if (unlikely(env->singlestep_enabled)) { | |
8324 | gen_debug_exception(ctxp); | |
8325 | } | |
8326 | /* Generate the return instruction */ | |
8327 | tcg_gen_exit_tb(0); | |
8328 | } | |
8329 | gen_icount_end(tb, num_insns); | |
8330 | *gen_opc_ptr = INDEX_op_end; | |
8331 | if (unlikely(search_pc)) { | |
8332 | j = gen_opc_ptr - gen_opc_buf; | |
8333 | lj++; | |
8334 | while (lj <= j) | |
8335 | gen_opc_instr_start[lj++] = 0; | |
8336 | } else { | |
8337 | tb->size = ctx.nip - pc_start; | |
8338 | tb->icount = num_insns; | |
8339 | } | |
8340 | #if defined(DEBUG_DISAS) | |
8341 | qemu_log_mask(CPU_LOG_TB_CPU, "---------------- excp: %04x\n", ctx.exception); | |
8342 | log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0); | |
8343 | if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) { | |
8344 | int flags; | |
8345 | flags = env->bfd_mach; | |
8346 | flags |= ctx.le_mode << 16; | |
8347 | qemu_log("IN: %s\n", lookup_symbol(pc_start)); | |
8348 | log_target_disas(pc_start, ctx.nip - pc_start, flags); | |
8349 | qemu_log("\n"); | |
8350 | } | |
8351 | #endif | |
8352 | } | |
8353 | ||
8354 | void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb) | |
8355 | { | |
8356 | gen_intermediate_code_internal(env, tb, 0); | |
8357 | } | |
8358 | ||
8359 | void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb) | |
8360 | { | |
8361 | gen_intermediate_code_internal(env, tb, 1); | |
8362 | } | |
8363 | ||
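| /* Restore the guest nip after a fault inside a TB: pc_pos indexes the | |
|  * gen_opc_pc[] table filled in by the search_pc pass of | |
|  * gen_intermediate_code_internal() above. */ | |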
8364 | void gen_pc_load(CPUState *env, TranslationBlock *tb, | |
8365 | unsigned long searched_pc, int pc_pos, void *puc) | |
8366 | { | |
8367 | env->nip = gen_opc_pc[pc_pos]; | |
8368 | } |