]> Git Repo - qemu.git/blame - target-ppc/op_mem.h
Fix rfi instruction: do not depend on current execution mode
[qemu.git] / target-ppc / op_mem.h
CommitLineData
76a66253
JM
1/*
2 * PowerPC emulation micro-operations for qemu.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
9a64fbe4 20
0fa85d43 21static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
9a64fbe4 22{
ac9eb073 23 uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
9a64fbe4
FB
24 return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
25}
26
111bfab3
FB
27static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
28{
29 int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
76a66253 30 return (int16_t)((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
111bfab3
FB
31}
32
0fa85d43 33static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
9a64fbe4 34{
ac9eb073 35 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
9a64fbe4
FB
36 return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
37 ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
38}
39
#if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
/* Byte-reversed (little-endian) 64-bit load helper.
 * Fix: the least-significant byte must be shifted by 56 (not 54)
 * to land in the most-significant byte of the result.
 */
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
54
#if defined(TARGET_PPC64)
/* Sign-extending 32-bit load (lwa and friends). */
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

/* Byte-reversed sign-extending 32-bit load.
 * Fix: sign-extend the COMPLETE byte-swapped value; the original cast
 * only the first OR operand to int32_t, so the result returned to the
 * int64_t caller was zero-extended instead of sign-extended.
 */
static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
68
0fa85d43 69static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
9a64fbe4
FB
70{
71 uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
ac9eb073 72 glue(stw, MEMSUFFIX)(EA, tmp);
9a64fbe4
FB
73}
74
0fa85d43 75static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
9a64fbe4
FB
76{
77 uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
78 ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
ac9eb073 79 glue(stl, MEMSUFFIX)(EA, tmp);
9a64fbe4
FB
80}
81
#if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
/* Byte-reversed (little-endian) 64-bit store helper. */
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t swapped;

    swapped = ((data >> 56) & 0x00000000000000FFULL) |
              ((data >> 40) & 0x000000000000FF00ULL) |
              ((data >> 24) & 0x0000000000FF0000ULL) |
              ((data >> 8)  & 0x00000000FF000000ULL) |
              ((data << 8)  & 0x000000FF00000000ULL) |
              ((data << 24) & 0x0000FF0000000000ULL) |
              ((data << 40) & 0x00FF000000000000ULL) |
              ((data << 56) & 0xFF00000000000000ULL);
    glue(stq, MEMSUFFIX)(EA, swapped);
}
#endif
96
/*** Integer load ***/
/* Generate a micro-op loading into T1 from the 32-bit address in T0. */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, keeping the full 64-bit effective address (64-bit mode). */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

/* Generate a micro-op storing T1 at the 32-bit address in T0. */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, keeping the full 64-bit effective address (64-bit mode). */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
129
/* Big-endian integer loads */
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

/* Little-endian integer loads (byte-reversing helpers) */
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
111bfab3 157
/*** Integer store ***/
/* Big-endian integer stores */
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

/* Little-endian integer stores (byte-reversing helpers) */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
111bfab3 178
/*** Integer load and store with byte reverse ***/
/* In big-endian mode lhbrx/lwbrx/sthbrx/stwbrx swap bytes... */
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

/* ...while in little-endian mode they are plain accesses. */
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
111bfab3 201
9a64fbe4 202/*** Integer load and store multiple ***/
d9bce9d9 203void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
9a64fbe4 204{
76a66253 205 glue(do_lmw, MEMSUFFIX)(PARAM1);
9a64fbe4
FB
206 RETURN();
207}
208
d9bce9d9
JM
209#if defined(TARGET_PPC64)
210void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
211{
212 glue(do_lmw_64, MEMSUFFIX)(PARAM1);
213 RETURN();
214}
215#endif
216
217void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
9a64fbe4 218{
76a66253 219 glue(do_lmw_le, MEMSUFFIX)(PARAM1);
9a64fbe4
FB
220 RETURN();
221}
222
d9bce9d9
JM
223#if defined(TARGET_PPC64)
224void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
225{
226 glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
227 RETURN();
228}
229#endif
230
231void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
111bfab3 232{
76a66253 233 glue(do_stmw, MEMSUFFIX)(PARAM1);
111bfab3
FB
234 RETURN();
235}
236
d9bce9d9
JM
237#if defined(TARGET_PPC64)
238void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
239{
240 glue(do_stmw_64, MEMSUFFIX)(PARAM1);
241 RETURN();
242}
243#endif
244
245void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
111bfab3 246{
76a66253 247 glue(do_stmw_le, MEMSUFFIX)(PARAM1);
111bfab3
FB
248 RETURN();
249}
250
d9bce9d9
JM
251#if defined(TARGET_PPC64)
252void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
253{
254 glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
255 RETURN();
256}
257#endif
258
9a64fbe4 259/*** Integer load and store strings ***/
d9bce9d9
JM
260void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
261{
262 glue(do_lsw, MEMSUFFIX)(PARAM1);
263 RETURN();
264}
265
266#if defined(TARGET_PPC64)
267void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
9a64fbe4 268{
d9bce9d9 269 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
9a64fbe4
FB
270 RETURN();
271}
d9bce9d9 272#endif
9a64fbe4 273
d9bce9d9 274void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
111bfab3 275{
d9bce9d9 276 glue(do_lsw_le, MEMSUFFIX)(PARAM1);
111bfab3
FB
277 RETURN();
278}
279
d9bce9d9
JM
280#if defined(TARGET_PPC64)
281void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
282{
283 glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
284 RETURN();
285}
286#endif
287
9a64fbe4
FB
288/* PPC32 specification says we must generate an exception if
289 * rA is in the range of registers to be loaded.
290 * In an other hand, IBM says this is valid, but rA won't be loaded.
291 * For now, I'll follow the spec...
292 */
d9bce9d9
JM
293void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
294{
295 /* Note: T1 comes from xer_bc then no cast is needed */
296 if (likely(T1 != 0)) {
297 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
298 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
299 do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
300 } else {
301 glue(do_lsw, MEMSUFFIX)(PARAM1);
302 }
303 }
304 RETURN();
305}
306
307#if defined(TARGET_PPC64)
308void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
309{
310 /* Note: T1 comes from xer_bc then no cast is needed */
311 if (likely(T1 != 0)) {
312 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
313 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
314 do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
315 } else {
316 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
317 }
318 }
319 RETURN();
320}
321#endif
322
323void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
9a64fbe4 324{
d9bce9d9
JM
325 /* Note: T1 comes from xer_bc then no cast is needed */
326 if (likely(T1 != 0)) {
76a66253
JM
327 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
328 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
9fddaa0c 329 do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
9a64fbe4 330 } else {
d9bce9d9 331 glue(do_lsw_le, MEMSUFFIX)(PARAM1);
9a64fbe4
FB
332 }
333 }
334 RETURN();
335}
336
d9bce9d9
JM
337#if defined(TARGET_PPC64)
338void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
111bfab3 339{
d9bce9d9
JM
340 /* Note: T1 comes from xer_bc then no cast is needed */
341 if (likely(T1 != 0)) {
76a66253
JM
342 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
343 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
111bfab3
FB
344 do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
345 } else {
d9bce9d9 346 glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
111bfab3
FB
347 }
348 }
349 RETURN();
350}
d9bce9d9
JM
351#endif
352
353void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
354{
355 glue(do_stsw, MEMSUFFIX)(PARAM1);
356 RETURN();
357}
358
359#if defined(TARGET_PPC64)
360void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
361{
362 glue(do_stsw_64, MEMSUFFIX)(PARAM1);
363 RETURN();
364}
365#endif
111bfab3 366
d9bce9d9 367void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
9a64fbe4 368{
d9bce9d9 369 glue(do_stsw_le, MEMSUFFIX)(PARAM1);
9a64fbe4
FB
370 RETURN();
371}
372
d9bce9d9
JM
373#if defined(TARGET_PPC64)
374void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
111bfab3 375{
d9bce9d9 376 glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
111bfab3
FB
377 RETURN();
378}
d9bce9d9 379#endif
111bfab3 380
9a64fbe4
FB
381/*** Floating-point store ***/
382#define PPC_STF_OP(name, op) \
d9bce9d9
JM
383void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
384{ \
385 glue(op, MEMSUFFIX)((uint32_t)T0, FT0); \
386 RETURN(); \
387}
388
389#if defined(TARGET_PPC64)
390#define PPC_STF_OP_64(name, op) \
391void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
9a64fbe4 392{ \
d9bce9d9 393 glue(op, MEMSUFFIX)((uint64_t)T0, FT0); \
9a64fbe4
FB
394 RETURN(); \
395}
d9bce9d9 396#endif
9a64fbe4
FB
397
398PPC_STF_OP(fd, stfq);
399PPC_STF_OP(fs, stfl);
d9bce9d9
JM
400#if defined(TARGET_PPC64)
401PPC_STF_OP_64(fd, stfq);
402PPC_STF_OP_64(fs, stfl);
403#endif
9a64fbe4 404
111bfab3
FB
405static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
406{
407 union {
408 double d;
409 uint64_t u;
410 } u;
411
412 u.d = d;
413 u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
414 ((u.u & 0x00FF000000000000ULL) >> 40) |
415 ((u.u & 0x0000FF0000000000ULL) >> 24) |
416 ((u.u & 0x000000FF00000000ULL) >> 8) |
417 ((u.u & 0x00000000FF000000ULL) << 8) |
418 ((u.u & 0x0000000000FF0000ULL) << 24) |
419 ((u.u & 0x000000000000FF00ULL) << 40) |
420 ((u.u & 0x00000000000000FFULL) << 56);
421 glue(stfq, MEMSUFFIX)(EA, u.d);
422}
423
424static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
425{
426 union {
427 float f;
428 uint32_t u;
429 } u;
430
431 u.f = f;
432 u.u = ((u.u & 0xFF000000UL) >> 24) |
433 ((u.u & 0x00FF0000ULL) >> 8) |
434 ((u.u & 0x0000FF00UL) << 8) |
435 ((u.u & 0x000000FFULL) << 24);
436 glue(stfl, MEMSUFFIX)(EA, u.f);
437}
438
439PPC_STF_OP(fd_le, stfqr);
440PPC_STF_OP(fs_le, stflr);
d9bce9d9
JM
441#if defined(TARGET_PPC64)
442PPC_STF_OP_64(fd_le, stfqr);
443PPC_STF_OP_64(fs_le, stflr);
444#endif
111bfab3 445
9a64fbe4
FB
446/*** Floating-point load ***/
447#define PPC_LDF_OP(name, op) \
d9bce9d9
JM
448void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
449{ \
450 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
451 RETURN(); \
452}
453
454#if defined(TARGET_PPC64)
455#define PPC_LDF_OP_64(name, op) \
456void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
9a64fbe4 457{ \
d9bce9d9 458 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
9a64fbe4
FB
459 RETURN(); \
460}
d9bce9d9 461#endif
9a64fbe4
FB
462
463PPC_LDF_OP(fd, ldfq);
464PPC_LDF_OP(fs, ldfl);
d9bce9d9
JM
465#if defined(TARGET_PPC64)
466PPC_LDF_OP_64(fd, ldfq);
467PPC_LDF_OP_64(fs, ldfl);
468#endif
9a64fbe4 469
111bfab3
FB
470static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
471{
472 union {
473 double d;
474 uint64_t u;
475 } u;
476
477 u.d = glue(ldfq, MEMSUFFIX)(EA);
478 u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
479 ((u.u & 0x00FF000000000000ULL) >> 40) |
480 ((u.u & 0x0000FF0000000000ULL) >> 24) |
481 ((u.u & 0x000000FF00000000ULL) >> 8) |
482 ((u.u & 0x00000000FF000000ULL) << 8) |
483 ((u.u & 0x0000000000FF0000ULL) << 24) |
484 ((u.u & 0x000000000000FF00ULL) << 40) |
485 ((u.u & 0x00000000000000FFULL) << 56);
486
487 return u.d;
488}
489
490static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
491{
492 union {
493 float f;
494 uint32_t u;
495 } u;
496
497 u.f = glue(ldfl, MEMSUFFIX)(EA);
498 u.u = ((u.u & 0xFF000000UL) >> 24) |
499 ((u.u & 0x00FF0000ULL) >> 8) |
500 ((u.u & 0x0000FF00UL) << 8) |
501 ((u.u & 0x000000FFULL) << 24);
502
503 return u.f;
504}
505
506PPC_LDF_OP(fd_le, ldfqr);
507PPC_LDF_OP(fs_le, ldflr);
d9bce9d9
JM
508#if defined(TARGET_PPC64)
509PPC_LDF_OP_64(fd_le, ldfqr);
510PPC_LDF_OP_64(fs_le, ldflr);
511#endif
111bfab3 512
985a19d6 513/* Load and set reservation */
d9bce9d9
JM
514void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
515{
516 if (unlikely(T0 & 0x03)) {
517 do_raise_exception(EXCP_ALIGN);
518 } else {
519 T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
520 regs->reserve = (uint32_t)T0;
521 }
522 RETURN();
523}
524
525#if defined(TARGET_PPC64)
526void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
527{
528 if (unlikely(T0 & 0x03)) {
529 do_raise_exception(EXCP_ALIGN);
530 } else {
531 T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
532 regs->reserve = (uint64_t)T0;
533 }
534 RETURN();
535}
536
426613db
JM
537void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
538{
539 if (unlikely(T0 & 0x03)) {
540 do_raise_exception(EXCP_ALIGN);
541 } else {
542 T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
543 regs->reserve = (uint32_t)T0;
544 }
545 RETURN();
546}
547
d9bce9d9
JM
548void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
549{
550 if (unlikely(T0 & 0x03)) {
551 do_raise_exception(EXCP_ALIGN);
552 } else {
553 T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
554 regs->reserve = (uint64_t)T0;
555 }
556 RETURN();
557}
558#endif
559
560void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
561{
562 if (unlikely(T0 & 0x03)) {
563 do_raise_exception(EXCP_ALIGN);
564 } else {
565 T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
566 regs->reserve = (uint32_t)T0;
567 }
568 RETURN();
569}
570
571#if defined(TARGET_PPC64)
572void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
985a19d6 573{
76a66253 574 if (unlikely(T0 & 0x03)) {
9fddaa0c 575 do_raise_exception(EXCP_ALIGN);
985a19d6 576 } else {
d9bce9d9
JM
577 T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
578 regs->reserve = (uint64_t)T0;
985a19d6
FB
579 }
580 RETURN();
581}
582
426613db
JM
583void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
584{
585 if (unlikely(T0 & 0x03)) {
586 do_raise_exception(EXCP_ALIGN);
587 } else {
588 T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
589 regs->reserve = (uint32_t)T0;
590 }
591 RETURN();
592}
593
d9bce9d9 594void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
111bfab3 595{
76a66253 596 if (unlikely(T0 & 0x03)) {
111bfab3
FB
597 do_raise_exception(EXCP_ALIGN);
598 } else {
d9bce9d9
JM
599 T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
600 regs->reserve = (uint64_t)T0;
111bfab3
FB
601 }
602 RETURN();
603}
d9bce9d9 604#endif
111bfab3 605
9a64fbe4 606/* Store with reservation */
d9bce9d9
JM
607void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
608{
609 if (unlikely(T0 & 0x03)) {
610 do_raise_exception(EXCP_ALIGN);
611 } else {
612 if (unlikely(regs->reserve != (uint32_t)T0)) {
613 env->crf[0] = xer_ov;
614 } else {
615 glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
616 env->crf[0] = xer_ov | 0x02;
617 }
618 }
619 regs->reserve = -1;
620 RETURN();
621}
622
623#if defined(TARGET_PPC64)
624void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
625{
626 if (unlikely(T0 & 0x03)) {
627 do_raise_exception(EXCP_ALIGN);
628 } else {
629 if (unlikely(regs->reserve != (uint64_t)T0)) {
630 env->crf[0] = xer_ov;
631 } else {
632 glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
633 env->crf[0] = xer_ov | 0x02;
634 }
635 }
636 regs->reserve = -1;
637 RETURN();
638}
639
426613db
JM
640void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
641{
642 if (unlikely(T0 & 0x03)) {
643 do_raise_exception(EXCP_ALIGN);
644 } else {
645 if (unlikely(regs->reserve != (uint32_t)T0)) {
646 env->crf[0] = xer_ov;
647 } else {
648 glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
649 env->crf[0] = xer_ov | 0x02;
650 }
651 }
652 regs->reserve = -1;
653 RETURN();
654}
655
d9bce9d9 656void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
9a64fbe4 657{
76a66253 658 if (unlikely(T0 & 0x03)) {
9fddaa0c 659 do_raise_exception(EXCP_ALIGN);
9a64fbe4 660 } else {
d9bce9d9 661 if (unlikely(regs->reserve != (uint64_t)T0)) {
9a64fbe4
FB
662 env->crf[0] = xer_ov;
663 } else {
d9bce9d9
JM
664 glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
665 env->crf[0] = xer_ov | 0x02;
666 }
667 }
668 regs->reserve = -1;
669 RETURN();
670}
671#endif
672
673void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
674{
675 if (unlikely(T0 & 0x03)) {
676 do_raise_exception(EXCP_ALIGN);
677 } else {
678 if (unlikely(regs->reserve != (uint32_t)T0)) {
679 env->crf[0] = xer_ov;
680 } else {
681 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
9a64fbe4
FB
682 env->crf[0] = xer_ov | 0x02;
683 }
684 }
76a66253 685 regs->reserve = -1;
9a64fbe4
FB
686 RETURN();
687}
688
d9bce9d9
JM
689#if defined(TARGET_PPC64)
690void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
111bfab3 691{
76a66253 692 if (unlikely(T0 & 0x03)) {
111bfab3
FB
693 do_raise_exception(EXCP_ALIGN);
694 } else {
d9bce9d9 695 if (unlikely(regs->reserve != (uint64_t)T0)) {
111bfab3
FB
696 env->crf[0] = xer_ov;
697 } else {
d9bce9d9 698 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
111bfab3
FB
699 env->crf[0] = xer_ov | 0x02;
700 }
701 }
76a66253 702 regs->reserve = -1;
111bfab3
FB
703 RETURN();
704}
705
426613db
JM
706void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
707{
708 if (unlikely(T0 & 0x03)) {
709 do_raise_exception(EXCP_ALIGN);
710 } else {
711 if (unlikely(regs->reserve != (uint32_t)T0)) {
712 env->crf[0] = xer_ov;
713 } else {
714 glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
715 env->crf[0] = xer_ov | 0x02;
716 }
717 }
718 regs->reserve = -1;
719 RETURN();
720}
721
d9bce9d9
JM
722void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
723{
724 if (unlikely(T0 & 0x03)) {
725 do_raise_exception(EXCP_ALIGN);
726 } else {
727 if (unlikely(regs->reserve != (uint64_t)T0)) {
728 env->crf[0] = xer_ov;
729 } else {
730 glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
731 env->crf[0] = xer_ov | 0x02;
732 }
733 }
734 regs->reserve = -1;
735 RETURN();
736}
737#endif
738
739void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
740{
741 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
742 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
743 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
744 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
745 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
746 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
747 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
748 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
749#if DCACHE_LINE_SIZE == 64
750 /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
751 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
752 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
753 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
754 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
755 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
756 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
757 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
758 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
759#endif
760 RETURN();
761}
762
763#if defined(TARGET_PPC64)
764void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
9a64fbe4 765{
d9bce9d9
JM
766 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
767 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
768 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
769 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
770 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
771 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
772 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
773 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
76a66253
JM
774#if DCACHE_LINE_SIZE == 64
775 /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
d9bce9d9
JM
776 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
777 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
778 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
779 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
780 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
781 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
782 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
783 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
76a66253 784#endif
9a64fbe4
FB
785 RETURN();
786}
d9bce9d9 787#endif
9a64fbe4 788
36f69651
JM
789/* Instruction cache block invalidate */
790void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
791{
792 glue(do_icbi, MEMSUFFIX)();
793 RETURN();
794}
795
796#if defined(TARGET_PPC64)
797void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
798{
799 glue(do_icbi_64, MEMSUFFIX)();
800 RETURN();
801}
802#endif
803
9a64fbe4 804/* External access */
d9bce9d9
JM
805void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
806{
807 T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
808 RETURN();
809}
810
811#if defined(TARGET_PPC64)
812void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
813{
814 T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
815 RETURN();
816}
817#endif
818
819void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
820{
821 glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
822 RETURN();
823}
824
825#if defined(TARGET_PPC64)
826void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
9a64fbe4 827{
d9bce9d9 828 glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
9a64fbe4
FB
829 RETURN();
830}
d9bce9d9 831#endif
9a64fbe4 832
d9bce9d9 833void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
9a64fbe4 834{
d9bce9d9 835 T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
9a64fbe4
FB
836 RETURN();
837}
838
d9bce9d9
JM
839#if defined(TARGET_PPC64)
840void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
111bfab3 841{
d9bce9d9 842 T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
111bfab3
FB
843 RETURN();
844}
d9bce9d9 845#endif
111bfab3 846
d9bce9d9 847void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
111bfab3 848{
d9bce9d9 849 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
111bfab3
FB
850 RETURN();
851}
852
d9bce9d9
JM
853#if defined(TARGET_PPC64)
854void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
855{
856 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
857 RETURN();
858}
859#endif
860
76a66253
JM
861/* XXX: those micro-ops need tests ! */
862/* PowerPC 601 specific instructions (POWER bridge) */
863void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
864{
865 /* When byte count is 0, do nothing */
d9bce9d9 866 if (likely(T1 != 0)) {
76a66253
JM
867 glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
868 }
869 RETURN();
870}
871
872/* POWER2 quad load and store */
873/* XXX: TAGs are not managed */
874void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
875{
876 glue(do_POWER2_lfq, MEMSUFFIX)();
877 RETURN();
878}
879
880void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
881{
882 glue(do_POWER2_lfq_le, MEMSUFFIX)();
883 RETURN();
884}
885
886void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
887{
888 glue(do_POWER2_stfq, MEMSUFFIX)();
889 RETURN();
890}
891
892void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
893{
894 glue(do_POWER2_stfq_le, MEMSUFFIX)();
895 RETURN();
896}
897
0487d6a8
JM
898#if defined(TARGET_PPCSPE)
899/* SPE extension */
900#define _PPC_SPE_LD_OP(name, op) \
901void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void) \
902{ \
903 T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0); \
904 RETURN(); \
905}
906
907#if defined(TARGET_PPC64)
908#define _PPC_SPE_LD_OP_64(name, op) \
909void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void) \
910{ \
911 T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0); \
912 RETURN(); \
913}
914#define PPC_SPE_LD_OP(name, op) \
915_PPC_SPE_LD_OP(name, op); \
916_PPC_SPE_LD_OP_64(name, op)
917#else
918#define PPC_SPE_LD_OP(name, op) \
919_PPC_SPE_LD_OP(name, op)
920#endif
921
922
923#define _PPC_SPE_ST_OP(name, op) \
924void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void) \
925{ \
926 glue(op, MEMSUFFIX)((uint32_t)T0, T1_64); \
927 RETURN(); \
928}
929
930#if defined(TARGET_PPC64)
931#define _PPC_SPE_ST_OP_64(name, op) \
932void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \
933{ \
934 glue(op, MEMSUFFIX)((uint64_t)T0, T1_64); \
935 RETURN(); \
936}
937#define PPC_SPE_ST_OP(name, op) \
938_PPC_SPE_ST_OP(name, op); \
939_PPC_SPE_ST_OP_64(name, op)
940#else
941#define PPC_SPE_ST_OP(name, op) \
942_PPC_SPE_ST_OP(name, op)
943#endif
944
945#if !defined(TARGET_PPC64)
946PPC_SPE_LD_OP(dd, ldq);
947PPC_SPE_ST_OP(dd, stq);
948PPC_SPE_LD_OP(dd_le, ld64r);
949PPC_SPE_ST_OP(dd_le, st64r);
950#endif
951static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
952{
953 uint64_t ret;
954 ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
955 ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
956 return ret;
957}
958PPC_SPE_LD_OP(dw, spe_ldw);
959static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
960{
961 glue(stl, MEMSUFFIX)(EA, data >> 32);
962 glue(stl, MEMSUFFIX)(EA + 4, data);
963}
964PPC_SPE_ST_OP(dw, spe_stdw);
965static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
966{
967 uint64_t ret;
968 ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
969 ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
970 return ret;
971}
972PPC_SPE_LD_OP(dw_le, spe_ldw_le);
973static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
974 uint64_t data)
975{
976 glue(st32r, MEMSUFFIX)(EA, data >> 32);
977 glue(st32r, MEMSUFFIX)(EA + 4, data);
978}
979PPC_SPE_ST_OP(dw_le, spe_stdw_le);
980static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
981{
982 uint64_t ret;
983 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
984 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
985 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
986 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
987 return ret;
988}
989PPC_SPE_LD_OP(dh, spe_ldh);
990static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
991{
992 glue(stw, MEMSUFFIX)(EA, data >> 48);
993 glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
994 glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
995 glue(stw, MEMSUFFIX)(EA + 6, data);
996}
997PPC_SPE_ST_OP(dh, spe_stdh);
998static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
999{
1000 uint64_t ret;
1001 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1002 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
1003 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
1004 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
1005 return ret;
1006}
1007PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1008static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1009 uint64_t data)
1010{
1011 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1012 glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1013 glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1014 glue(st16r, MEMSUFFIX)(EA + 6, data);
1015}
1016PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1017static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1018{
1019 uint64_t ret;
1020 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1021 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
1022 return ret;
1023}
1024PPC_SPE_LD_OP(whe, spe_lwhe);
1025static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
1026{
1027 glue(stw, MEMSUFFIX)(EA, data >> 48);
1028 glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
1029}
1030PPC_SPE_ST_OP(whe, spe_stwhe);
1031static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1032{
1033 uint64_t ret;
1034 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1035 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
1036 return ret;
1037}
1038PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1039static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1040 uint64_t data)
1041{
1042 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1043 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1044}
1045PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1046static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1047{
1048 uint64_t ret;
1049 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
1050 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
1051 return ret;
1052}
1053PPC_SPE_LD_OP(whou, spe_lwhou);
1054static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1055{
1056 uint64_t ret;
1057 ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
1058 ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
1059 return ret;
1060}
1061PPC_SPE_LD_OP(whos, spe_lwhos);
1062static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
1063{
1064 glue(stw, MEMSUFFIX)(EA, data >> 32);
1065 glue(stw, MEMSUFFIX)(EA + 2, data);
1066}
1067PPC_SPE_ST_OP(who, spe_stwho);
1068static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1069{
1070 uint64_t ret;
1071 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
1072 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
1073 return ret;
1074}
1075PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1076static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1077{
1078 uint64_t ret;
1079 ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
1080 ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
1081 return ret;
1082}
1083PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1084static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1085 uint64_t data)
1086{
1087 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1088 glue(st16r, MEMSUFFIX)(EA + 2, data);
1089}
1090PPC_SPE_ST_OP(who_le, spe_stwho_le);
1091#if !defined(TARGET_PPC64)
1092static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
1093{
1094 glue(stl, MEMSUFFIX)(EA, data);
1095}
1096PPC_SPE_ST_OP(wwo, spe_stwwo);
1097static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1098 uint64_t data)
1099{
1100 glue(st32r, MEMSUFFIX)(EA, data);
1101}
1102PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1103#endif
1104static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1105{
1106 uint16_t tmp;
1107 tmp = glue(lduw, MEMSUFFIX)(EA);
1108 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1109}
1110PPC_SPE_LD_OP(h, spe_lh);
1111static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1112{
1113 uint16_t tmp;
1114 tmp = glue(ld16r, MEMSUFFIX)(EA);
1115 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1116}
1117PPC_SPE_LD_OP(h_le, spe_lh_le);
1118static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1119{
1120 uint32_t tmp;
1121 tmp = glue(ldl, MEMSUFFIX)(EA);
1122 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1123}
1124PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1125static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1126{
1127 uint32_t tmp;
1128 tmp = glue(ld32r, MEMSUFFIX)(EA);
1129 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1130}
1131PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1132static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1133{
1134 uint64_t ret;
1135 uint16_t tmp;
1136 tmp = glue(lduw, MEMSUFFIX)(EA);
1137 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1138 tmp = glue(lduw, MEMSUFFIX)(EA + 2);
1139 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1140 return ret;
1141}
1142PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1143static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1144{
1145 uint64_t ret;
1146 uint16_t tmp;
1147 tmp = glue(ld16r, MEMSUFFIX)(EA);
1148 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1149 tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
1150 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1151 return ret;
1152}
1153PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1154#endif /* defined(TARGET_PPCSPE) */
1155
9a64fbe4 1156#undef MEMSUFFIX
This page took 0.252864 seconds and 4 git commands to generate.