/* Source: QEMU target-ppc/op_mem.h @ revision 64adab3f (web-browser scrape). */
1 |
/*
|
---|---|
2 |
* PowerPC emulation micro-operations for qemu.
|
3 |
*
|
4 |
* Copyright (c) 2003-2007 Jocelyn Mayer
|
5 |
*
|
6 |
* This library is free software; you can redistribute it and/or
|
7 |
* modify it under the terms of the GNU Lesser General Public
|
8 |
* License as published by the Free Software Foundation; either
|
9 |
* version 2 of the License, or (at your option) any later version.
|
10 |
*
|
11 |
* This library is distributed in the hope that it will be useful,
|
12 |
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
13 |
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
14 |
* Lesser General Public License for more details.
|
15 |
*
|
16 |
* You should have received a copy of the GNU Lesser General Public
|
17 |
* License along with this library; if not, write to the Free Software
|
18 |
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
19 |
*/
|
20 |
|
21 |
#include "op_mem_access.h" |
22 |
|
23 |
/*** Integer load and store multiple ***/
|
24 |
/* lmw: load multiple words starting at EA = T0 into GPRs beginning with
 * register PARAM1.  The per-MEMSUFFIX do_lmw* helper does the actual
 * memory accesses. */
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
/* Full 64-bit effective-address variant of lmw. */
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* Byte-reversed (little-endian mode) variant of lmw. */
void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* stmw: store multiple words from GPRs beginning with register PARAM1
 * to memory starting at EA = T0. */
void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* Byte-reversed (little-endian mode) variant of stmw. */
void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
|
79 |
|
80 |
/*** Integer load and store strings ***/
|
81 |
/* lswi: load string word immediate.  PARAM1 is the first target GPR;
 * the byte count comes from the decoded instruction (handled inside
 * do_lsw). */
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        /* Reject the case where the destination range PARAM1..PARAM1+T1-1
         * overlaps PARAM2 or PARAM3 (presumably the rA/rB operand
         * registers — confirm against the translator). */
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            raise_exception_err(env, POWERPC_EXCP_PROGRAM,
                                POWERPC_EXCP_INVAL |
                                POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            raise_exception_err(env, POWERPC_EXCP_PROGRAM,
                                POWERPC_EXCP_INVAL |
                                POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

/* stswi/stswx: store string word; PARAM1 is the first source GPR. */
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
|
147 |
|
148 |
/*** Floating-point store ***/
|
149 |
/* Emit a floating-point store micro-op: store FT0 at EA = (uint32_t)T0
 * through the per-MEMSUFFIX conversion/store routine 'op'. */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, with a full 64-bit effective address. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

/* stfs: convert the double to single precision, then store 32 bits. */
static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, float64 d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

/* stfiw: store the low 32 bits of the raw double image, no FP conversion. */
static always_inline void glue(stfiw, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(st32, MEMSUFFIX)(EA, u.l.lower);
}

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiw, stfiw);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiw, stfiw);
#endif

/* Byte-reversed (little-endian mode) store helpers below. */

/* stfqr: byte-swap the 64-bit double image before storing. */
static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    u.d = d;
    u.ll = bswap64(u.ll);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

/* stfsr: convert to single precision, byte-swap, store 32 bits. */
static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_FloatU u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.l = bswap32(u.l);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

/* stfiwr: byte-swap and store the low 32 bits of the raw double image. */
static always_inline void glue(stfiwr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.l.lower = bswap32(u.l.lower);
    glue(st32, MEMSUFFIX)(EA, u.l.lower);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiw_le, stfiwr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiw_le, stfiwr);
#endif
|
224 |
|
225 |
/*** Floating-point load ***/
|
226 |
/* Emit a floating-point load micro-op: FT0 = op(EA = (uint32_t)T0). */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, with a full 64-bit effective address. */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

/* ldfs: load a 32-bit single and widen it to double precision. */
static always_inline float64 glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif

/* ldfqr: load a 64-bit double image and byte-swap it (little-endian mode). */
static always_inline float64 glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    CPU_DoubleU u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.ll = bswap64(u.ll);

    return u.d;
}

/* ldfsr: load a 32-bit single, byte-swap, widen to double precision. */
static always_inline float64 glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    CPU_FloatU u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.l = bswap32(u.l);

    return float32_to_float64(u.f, &env->fp_status);
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
|
280 |
|
281 |
/* Load and set reservation */
|
282 |
/* Load and set reservation (lwarx/ldarx).
 * On success the loaded value goes to T1 and the reservation address is
 * recorded in env->reserve; a misaligned EA raises an alignment interrupt.
 * Per the PowerPC ISA, lwarx requires word (4-byte) alignment and ldarx
 * requires doubleword (8-byte) alignment — the ldarx variants previously
 * checked only (T0 & 0x03), missing 4-byte-misaligned doubleword EAs;
 * they now check (T0 & 0x07). */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx: doubleword reservation load; EA must be 8-byte aligned. */
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

/* Byte-reversed (little-endian mode) variants. */
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
|
373 |
|
374 |
/* Store with reservation */
|
375 |
/* Store conditional (stwcx./stdcx.).
 * CR0 is set to xer_so, with the EQ bit (0x02) added when the store
 * succeeds (i.e. the reservation still matches the EA).  The reservation
 * is unconditionally cleared afterwards.
 * Per the PowerPC ISA, stwcx. requires word (4-byte) alignment and
 * stdcx. requires doubleword (8-byte) alignment — the stdcx. variants
 * previously checked only (T0 & 0x03); they now check (T0 & 0x07). */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

/* stdcx.: doubleword conditional store; EA must be 8-byte aligned. */
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif

/* Byte-reversed (little-endian mode) variants. */
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        raise_exception(env, POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif
|
506 |
|
507 |
/* dcbz with a fixed 32-byte cache line: align T0 down to the line
 * boundary, then zero the whole line with unrolled 32-bit stores. */
void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)31);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    RETURN();
}

/* dcbz with a fixed 64-byte cache line. */
void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)63);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    RETURN();
}

/* dcbz with a fixed 128-byte cache line. */
void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)127);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
    RETURN();
}

/* Generic dcbz: the helper picks the line size from the CPU model. */
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
/* 64-bit effective-address variants of the above. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)31);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)63);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)127);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif
|
668 |
|
669 |
/* Instruction cache block invalidate */
|
670 |
/* icbi: instruction cache block invalidate; delegated to the
 * per-MEMSUFFIX helper. */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
/* 64-bit effective-address variant. */
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
|
683 |
|
684 |
/* External access */
|
685 |
/* eciwx: external control in word indexed — load a word from EA = T0
 * into T1.  (External-access permission checks, if any, are handled
 * elsewhere — only the memory access is emulated here.) */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

/* ecowx: external control out word indexed — store T1 at EA = T0. */
void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

/* Byte-reversed (little-endian mode) variants. */
void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
|
740 |
|
741 |
/* XXX: those micro-ops need tests ! */
|
742 |
/* PowerPC 601 specific instructions (POWER bridge) */
|
743 |
/* lscbx (POWER bridge): load string and compare byte indexed.
 * PARAM1/PARAM2/PARAM3 are forwarded to the helper; a zero byte count
 * (T1, taken from XER) is a no-op. */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

/* Fix: OPPROTO was missing on this micro-op, unlike every sibling in
 * the file; restored for consistency with the dyngen prototype macro. */
void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
777 |
|
778 |
/* Altivec vector extension */
|
779 |
#if defined(WORDS_BIGENDIAN)
|
780 |
#define VR_DWORD0 0 |
781 |
#define VR_DWORD1 1 |
782 |
#else
|
783 |
#define VR_DWORD0 1 |
784 |
#define VR_DWORD1 0 |
785 |
#endif
|
786 |
void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void) |
787 |
{ |
788 |
AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint32_t)T0); |
789 |
AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint32_t)T0 + 8);
|
790 |
} |
791 |
|
792 |
void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void) |
793 |
{ |
794 |
AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0); |
795 |
AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0 + 8);
|
796 |
} |
797 |
|
798 |
void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void) |
799 |
{ |
800 |
glue(st64, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]); |
801 |
glue(st64, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
|
802 |
} |
803 |
|
804 |
void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void) |
805 |
{ |
806 |
glue(st64r, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]); |
807 |
glue(st64r, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
|
808 |
} |
809 |
|
810 |
#if defined(TARGET_PPC64)
|
811 |
void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void) |
812 |
{ |
813 |
AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint64_t)T0); |
814 |
AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint64_t)T0 + 8);
|
815 |
} |
816 |
|
817 |
void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void) |
818 |
{ |
819 |
AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0); |
820 |
AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0 + 8);
|
821 |
} |
822 |
|
823 |
void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void) |
824 |
{ |
825 |
glue(st64, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]); |
826 |
glue(st64, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
|
827 |
} |
828 |
|
829 |
void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void) |
830 |
{ |
831 |
glue(st64r, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]); |
832 |
glue(st64r, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
|
833 |
} |
834 |
#endif
|
835 |
#undef VR_DWORD0
|
836 |
#undef VR_DWORD1
|
837 |
|
838 |
/* SPE extension */
|
839 |
/* Emit an SPE 64-bit load micro-op: T1_64 = op(EA = (uint32_t)T0). */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, with a full 64-bit effective address. */
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
/* Instantiate both the 32-bit and 64-bit EA variants at once. */
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

/* Emit an SPE 64-bit store micro-op: op(EA = (uint32_t)T0, T1_64). */
#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif

/* Whole-doubleword load/store, big-endian and byte-reversed forms. */
PPC_SPE_LD_OP(dd, ldu64);
PPC_SPE_ST_OP(dd, st64);
PPC_SPE_LD_OP(dd_le, ldu64r);
PPC_SPE_ST_OP(dd_le, st64r);
887 |
/* Load two 32-bit words and pack them into a 64-bit SPE value:
 * word at EA goes in the upper half, word at EA+4 in the lower half. */
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu32, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu32, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
/* Store a 64-bit SPE value as two 32-bit words: upper half at EA,
 * lower half at EA+4. */
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(st32, MEMSUFFIX)(EA, data >> 32);
    glue(st32, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
/* Byte-reversed (little-endian mode) counterparts of the above. */
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
917 |
/* Load four 16-bit halfwords and pack them, most significant first,
 * into the four 16-bit lanes of a 64-bit SPE value. */
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
/* Store the four 16-bit lanes of a 64-bit SPE value, most significant
 * lane at the lowest address. */
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(st16, MEMSUFFIX)(EA, data >> 48);
    glue(st16, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
/* Byte-reversed (little-endian mode) counterparts of the above. */
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
955 |
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
|
956 |
{ |
957 |
uint64_t ret; |
958 |
ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
|
959 |
ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 16; |
960 |
return ret;
|
961 |
} |
962 |
PPC_SPE_LD_OP(whe, spe_lwhe); |
963 |
/* Store the upper ("even") halfword of each 32-bit word of data:
 * bits 63..48 go to EA, bits 31..16 to EA + 2. */
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(st16, MEMSUFFIX)(EA, data >> 48);
    glue(st16, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
/* Little-endian variant of spe_lwhe: two byte-reversed 16-bit loads
 * placed in the upper halfword of each 32-bit result word. */
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t hi, lo;

    hi = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
    lo = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 16;
    return hi | lo;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
/* Little-endian variant of spe_stwhe: byte-reversed stores of the
 * upper halfword of each 32-bit word of data. */
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
/* Load two 16-bit halfwords and zero-extend each into the lower
 * ("odd") halfword of its 32-bit word. */
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t hi;

    /* First access at EA, second at EA + 2 — order preserved. */
    hi = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 32;
    return hi | (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2);
}
PPC_SPE_LD_OP(whou, spe_lwhou);
/* Load two 16-bit halfwords at EA and EA + 2 and sign-extend each one
 * into the corresponding 32-bit word of the 64-bit result.
 *
 * Bug fix: the second halfword was OR-ed in as a 64-bit sign-extended
 * value, so a negative low halfword smeared 1-bits across bits 63..32
 * and corrupted the word loaded from EA.  Truncate the sign extension
 * to 32 bits (uint32_t cast) before merging.  (The first halfword is
 * unaffected: its extension bits are shifted out by << 32.)
 */
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((uint32_t)(int32_t)glue(lds16, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
/* Store the lower ("odd") halfword of each 32-bit word of data:
 * bits 47..32 go to EA, bits 15..0 to EA + 2. */
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(st16, MEMSUFFIX)(EA, data >> 32);
    glue(st16, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
/* Little-endian variant of spe_lwhou: two byte-reversed 16-bit loads
 * zero-extended into the lower halfword of each 32-bit word. */
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t hi;

    hi = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 32;
    return hi | (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2);
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
/* Little-endian variant of spe_lwhos: byte-reversed 16-bit loads,
 * each sign-extended into its own 32-bit word of the result.
 *
 * Bug fix (same as spe_lwhos): the second halfword's 64-bit sign
 * extension overwrote the upper word when the value was negative.
 * Truncate to 32 bits before merging.
 */
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((uint32_t)(int32_t)glue(lds16r, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
/* Little-endian variant of spe_stwho: byte-reversed stores of the
 * lower halfword of each 32-bit word of data. */
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
/* Store only the low ("odd") 32-bit word of data at EA. */
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(st32, MEMSUFFIX)(EA, (uint32_t)data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
/* Little-endian variant of spe_stwwo: byte-reversed store of the low
 * 32-bit word of data at EA. */
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, (uint32_t)data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
/* Load a single 16-bit halfword and splat it into the upper ("even")
 * halfword of both 32-bit words; the odd halfwords stay zero. */
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t half = glue(ldu16, MEMSUFFIX)(EA);

    return (half << 48) | (half << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
/* Little-endian variant of spe_lh: byte-reversed halfword load, splat
 * into the upper halfword of both 32-bit words. */
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t half = glue(ldu16r, MEMSUFFIX)(EA);

    return (half << 48) | (half << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
/* Load a single 32-bit word and replicate it into both halves of the
 * 64-bit result. */
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t word = glue(ldu32, MEMSUFFIX)(EA);

    return (word << 32) | word;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static always_inline
|
1065 |
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA) |
1066 |
{ |
1067 |
uint32_t tmp; |
1068 |
tmp = glue(ldu32r, MEMSUFFIX)(EA); |
1069 |
return ((uint64_t)tmp << 32) | (uint64_t)tmp; |
1070 |
} |
1071 |
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le); |
1072 |
/* Load two 16-bit halfwords; the first fills both halfwords of the
 * upper 32-bit word, the second both halfwords of the lower word. */
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t hi, lo;

    /* First access at EA, second at EA + 2 — order preserved. */
    hi = glue(ldu16, MEMSUFFIX)(EA);
    lo = glue(ldu16, MEMSUFFIX)(EA + 2);
    return (hi << 48) | (hi << 32) | (lo << 16) | lo;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static always_inline
|
1084 |
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA) |
1085 |
{ |
1086 |
uint64_t ret; |
1087 |
uint16_t tmp; |
1088 |
tmp = glue(ldu16r, MEMSUFFIX)(EA); |
1089 |
ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32); |
1090 |
tmp = glue(ldu16r, MEMSUFFIX)(EA + 2);
|
1091 |
ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
|
1092 |
return ret;
|
1093 |
} |
1094 |
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le); |
#undef MEMSUFFIX