/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}
static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);

    /* Swap the two bytes, then sign-extend the 16-bit result */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}
static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}
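/* The mask-and-shift ladders in these helpers are plain byte swaps between
 * big-endian and little-endian representations.  For illustration only, a
 * hypothetical generic helper (not part of this file's interface) could
 * compute the 32-bit swap in two steps:
 */
#if 0
static inline uint32_t bswap32_example (uint32_t x)
{
    /* swap adjacent bytes: AABBCCDD -> BBAADDCC */
    x = ((x & 0xFF00FF00) >> 8) | ((x & 0x00FF00FF) << 8);
    /* swap the two half-words: BBAADDCC -> DDCCBBAA */
    return (x >> 16) | (x << 16);
}
#endif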
#if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);

    /* Swap the four bytes, then sign-extend the 32-bit result */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
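/* The (int16_t)/(int32_t) casts above must wrap the whole swapped value:
 * e.g. bytes 0x00 0x80 are fetched by lduw as tmp = 0x0080, swapped to
 * 0x8000, and only then sign-extended to 0xFFFF8000 (-32768).  Casting
 * just the first term of the OR would leave the result zero-extended.
 */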
static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}
static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}
#if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif
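/* These helpers are compiled once per memory access mode: the including
 * file defines MEMSUFFIX (typically _raw, _user or _kernel in the softmmu
 * build) before including this one, and glue() pastes the tokens together,
 * so glue(ld16r, MEMSUFFIX) becomes, e.g., ld16r_raw.
 */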
/*** Integer load ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
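/* For example (illustration only), PPC_LD_OP(bz, ldub) below expands, with
 * MEMSUFFIX defined as _raw, to the micro-operation behind lbz:
 *
 *   void OPPROTO op_lbz_raw (void)
 *   {
 *       T1 = ldub_raw((uint32_t)T0);
 *       RETURN();
 *   }
 */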
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
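/* The byte-reverse ops (lhbrx, lwbrx, sthbrx, stwbrx) use the swapping
 * helpers in big-endian mode but the natural-order accessors in
 * little-endian mode: the instruction's reversal and the reversed storage
 * order cancel out.
 */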
/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/* PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
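/* The range checks above reject the case where rA (PARAM2) or rB (PARAM3),
 * as supplied by the translator, falls inside the register range starting
 * at rD (PARAM1) that the transfer would overwrite, as required by the
 * PPC32 specification quoted above.
 */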
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfl);
#endif
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = f;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}
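/* The unions rely on type punning: the value is reinterpreted as an
 * integer, byte-swapped, and reinterpreted back, so e.g. 1.0f
 * (0x3F800000) is stored as the bit pattern 0x0000803F.
 */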
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stflr);
#endif
/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfl);
#endif
static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);

    return u.f;
}
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldflr);
#endif
/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    /* ldarx requires double-word alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    /* ldarx requires double-word alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
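/* Guest code pairs these with the conditional stores below, e.g. an
 * atomic increment loop:
 *
 *   1:  lwarx  r5, 0, r3      # load word and set reservation
 *       addi   r5, r5, 1
 *       stwcx. r5, 0, r3      # store only if reservation still held
 *       bne-   1b             # CR0[EQ] clear: reservation lost, retry
 */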
/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    /* stdcx. requires double-word alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif

void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    /* stdcx. requires double-word alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif
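/* In the CR0 nibble written above (LT, GT, EQ, SO), the 0x02 bit is EQ and
 * reports a successful store; a mismatched reservation leaves EQ clear,
 * which is what guest retry loops (bne-) test.
 */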
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
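/* With the default 32-byte line, the eight stores above clear bytes
 * T0 .. T0 + 0x1F; a 64-byte DCACHE_LINE_SIZE adds the second group to
 * cover T0 .. T0 + 0x3F.  This presumes T0 has already been brought to a
 * cache-line boundary by the translator.
 */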
#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif
/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
/* XXX: these micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
#if defined(TARGET_PPCSPE)

#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
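/* As with the integer helpers, each PPC_SPE_LD_OP/PPC_SPE_ST_OP line below
 * instantiates one micro-op per access mode; for instance (illustration
 * only), PPC_SPE_LD_OP(dd, ldq) produces op_spe_ldd_raw, plus an
 * op_spe_ldd_64_raw variant when TARGET_PPC64 is defined.
 */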
#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
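/* Element layout: these helpers pack SPE vector elements most-significant
 * first, so the dh variants (evldh-style accesses) place the half-word at
 * EA in bits 63..48 of the 64-bit register and the one at EA + 6 in
 * bits 15..0.
 */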
static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    /* Mask to 32 bits so sign extension cannot spill into the top word */
    ret |= (uint64_t)(uint32_t)(int32_t)glue(ldsw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    /* Mask to 32 bits so sign extension cannot spill into the top word */
    ret |= (uint64_t)(uint32_t)glue(ld16rs, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
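/* Splat example: with half-words 0xAAAA at EA and 0xBBBB at EA + 2,
 * spe_lwhsplat returns 0xAAAAAAAABBBBBBBB, duplicating each element into
 * both half-words of its word.
 */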
#endif /* defined(TARGET_PPCSPE) */