root / target-ppc / op_mem.h @ 36f69651
History | View | Annotate | Download (22.8 kB)
1 |
/*
|
---|---|
2 |
* PowerPC emulation micro-operations for qemu.
|
3 |
*
|
4 |
* Copyright (c) 2003-2007 Jocelyn Mayer
|
5 |
*
|
6 |
* This library is free software; you can redistribute it and/or
|
7 |
* modify it under the terms of the GNU Lesser General Public
|
8 |
* License as published by the Free Software Foundation; either
|
9 |
* version 2 of the License, or (at your option) any later version.
|
10 |
*
|
11 |
* This library is distributed in the hope that it will be useful,
|
12 |
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
13 |
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
14 |
* Lesser General Public License for more details.
|
15 |
*
|
16 |
* You should have received a copy of the GNU Lesser General Public
|
17 |
* License along with this library; if not, write to the Free Software
|
18 |
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
19 |
*/
|
20 |
|
21 |
/* Load a halfword and return it with its two bytes swapped. */
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t v = glue(lduw, MEMSUFFIX)(EA);

    return (uint16_t)((v >> 8) | (v << 8));
}
26 |
|
27 |
/* Load a halfword byte-reversed and sign-extend it to 32 bits.
 * BUG FIX: the (int16_t) cast previously applied only to the first OR
 * operand, so a swapped value with bit 15 set was never sign-extended. */
static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}
32 |
|
33 |
/* Load a word and return it with its four bytes reversed. */
static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t v = glue(ldl, MEMSUFFIX)(EA);

    return ((v >> 24) & 0x000000FF) | ((v >> 8) & 0x0000FF00) |
           ((v << 8) & 0x00FF0000) | ((v << 24) & 0xFF000000);
}
39 |
|
40 |
#if defined(TARGET_PPC64)
|
41 |
/* Load a word and sign-extend it to 64 bits (lwa semantics). */
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    int32_t v = glue(ldl, MEMSUFFIX)(EA);

    return v;
}
45 |
|
46 |
/* Load a doubleword and return it with its eight bytes reversed.
 * BUG FIX: the least-significant byte was shifted left by 54 instead of
 * 56, scrambling the two most significant bytes of the result. */
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
58 |
|
59 |
/* Load a word byte-reversed and sign-extend it to 64 bits.
 * BUG FIX: the (int32_t) cast previously applied only to the first OR
 * operand, so the swapped value was zero-extended instead of
 * sign-extended when its top bit was set. */
static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
65 |
#endif
|
66 |
|
67 |
/* Store a halfword with its two bytes swapped. */
static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t swapped = (uint16_t)((data >> 8) | (data << 8));

    glue(stw, MEMSUFFIX)(EA, swapped);
}
72 |
|
73 |
/* Store a word with its four bytes reversed. */
static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t swapped;

    swapped = ((data >> 24) & 0x000000FF) | ((data >> 8) & 0x0000FF00) |
              ((data << 8) & 0x00FF0000) | ((data << 24) & 0xFF000000);
    glue(stl, MEMSUFFIX)(EA, swapped);
}
79 |
|
80 |
#if defined(TARGET_PPC64)
|
81 |
/* Store a doubleword with its eight bytes reversed. */
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t swapped;

    swapped = ((data >> 56) & 0x00000000000000FFULL) |
              ((data >> 40) & 0x000000000000FF00ULL) |
              ((data >> 24) & 0x0000000000FF0000ULL) |
              ((data >>  8) & 0x00000000FF000000ULL) |
              ((data <<  8) & 0x000000FF00000000ULL) |
              ((data << 24) & 0x0000FF0000000000ULL) |
              ((data << 40) & 0x00FF000000000000ULL) |
              ((data << 56) & 0xFF00000000000000ULL);
    glue(stq, MEMSUFFIX)(EA, swapped);
}
93 |
#endif
|
94 |
|
95 |
/*** Integer load ***/
|
96 |
/* Generate an integer load micro-op named op_l<name><MEMSUFFIX> that
 * loads into T1 from the effective address in T0, truncated to 32 bits
 * (32-bit address mode), using loader `op`. */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}
102 |
|
103 |
#if defined(TARGET_PPC64)
|
104 |
/* Same as PPC_LD_OP but keeps the full 64-bit effective address
 * (64-bit address mode); generated name carries a _64 infix. */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
110 |
#endif
|
111 |
|
112 |
/* Generate an integer store micro-op named op_st<name><MEMSUFFIX> that
 * stores T1 at the effective address in T0 truncated to 32 bits, using
 * store helper `op`. */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}
118 |
|
119 |
#if defined(TARGET_PPC64)
|
120 |
/* Same as PPC_ST_OP but keeps the full 64-bit effective address. */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
126 |
#endif
|
127 |
|
128 |
/* Native (big-endian) order integer loads: lbz, lha, lhz, lwz, plus
 * ld/lwa on 64-bit targets; _64 variants use 64-bit address mode. */
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

/* Little-endian-mode variants: same instructions with the data
 * byte-reversed via the ld*r helpers (byte loads need no swap). */
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
|
155 |
|
156 |
/*** Integer store ***/
|
157 |
/* Native order integer stores: stb, sth, stw, plus std on 64-bit
 * targets; _64 variants use 64-bit address mode. */
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

/* Little-endian-mode stores using the byte-reversing st*r helpers. */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
|
176 |
|
177 |
/*** Integer load and store with byte reverse ***/
|
178 |
/* lhbrx/lwbrx/sthbrx/stwbrx: explicit byte-reversed accesses.  In
 * big-endian mode these swap (ld16r/ld32r/st16r/st32r)... */
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

/* ...while in little-endian mode the two reversals cancel out, so the
 * plain native accessors are used. */
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
|
199 |
|
200 |
/*** Integer load and store multiple ***/
|
201 |
/* lmw/stmw micro-ops: each forwards to the corresponding do_* helper.
 * PARAM1 is a translator-supplied operand (presumably the starting GPR
 * index -- confirm against the do_lmw/do_stmw helpers).  _le variants
 * byte-reverse, _64 variants use 64-bit address mode. */
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
|
256 |
|
257 |
/*** Integer load and store strings ***/
|
258 |
/* lswi micro-ops (load string word immediate): thin forwarders to the
 * do_lsw* helpers; PARAM1 is translator-supplied (presumably the
 * destination start register -- confirm against do_lsw). */
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
|
285 |
|
286 |
/* PPC32 specification says we must generate an exception if
|
287 |
* rA is in the range of registers to be loaded.
|
288 |
* In an other hand, IBM says this is valid, but rA won't be loaded.
|
289 |
* For now, I'll follow the spec...
|
290 |
*/
|
291 |
/* lswx micro-ops (load string word indexed).  T1 holds the byte count
 * (from xer_bc); a zero count does nothing.  If the loaded register
 * range [PARAM1, PARAM1 + T1) would cover PARAM2 or PARAM3 (presumably
 * rA/rB -- confirm against the translator), the instruction is treated
 * as invalid per the PPC32 spec (see comment above). */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

/* Little-endian-mode variant: identical validity check, byte-reversing
 * helper. */
void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
|
350 |
|
351 |
/* stsw micro-ops (store string word): thin forwarders to the do_stsw*
 * helpers; PARAM1 is translator-supplied (presumably the source start
 * register -- confirm against do_stsw). */
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
|
378 |
|
379 |
/*** Floating-point store ***/
|
380 |
/* Generate a floating-point store micro-op: stores FT0 at the 32-bit
 * truncated effective address in T0 using store helper `op`. */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
386 |
|
387 |
#if defined(TARGET_PPC64)
|
388 |
/* Same as PPC_STF_OP but keeps the full 64-bit effective address. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
394 |
#endif
|
395 |
|
396 |
/* Native-order FP stores: stfd (double) and stfs (single). */
PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfl);
#endif
|
402 |
|
403 |
/* Store a double with its raw 64-bit pattern byte-reversed.  The bit
 * pattern is accessed through a union, swapped, and written back out
 * through the FP store helper. */
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } bits;

    bits.d = d;
    bits.u = ((bits.u >> 56) & 0x00000000000000FFULL) |
             ((bits.u >> 40) & 0x000000000000FF00ULL) |
             ((bits.u >> 24) & 0x0000000000FF0000ULL) |
             ((bits.u >>  8) & 0x00000000FF000000ULL) |
             ((bits.u <<  8) & 0x000000FF00000000ULL) |
             ((bits.u << 24) & 0x0000FF0000000000ULL) |
             ((bits.u << 40) & 0x00FF000000000000ULL) |
             ((bits.u << 56) & 0xFF00000000000000ULL);
    glue(stfq, MEMSUFFIX)(EA, bits.d);
}
421 |
|
422 |
/* Store a float with its raw 32-bit pattern byte-reversed. */
static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } bits;

    bits.f = f;
    bits.u = ((bits.u >> 24) & 0x000000FF) |
             ((bits.u >>  8) & 0x0000FF00) |
             ((bits.u <<  8) & 0x00FF0000) |
             ((bits.u << 24) & 0xFF000000);
    glue(stfl, MEMSUFFIX)(EA, bits.f);
}
436 |
|
437 |
/* Little-endian-mode FP stores via the byte-reversing helpers. */
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stflr);
#endif
|
443 |
|
444 |
/*** Floating-point load ***/
|
445 |
/* Generate a floating-point load micro-op: loads into FT0 from the
 * 32-bit truncated effective address in T0 using loader `op`. */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}
451 |
|
452 |
#if defined(TARGET_PPC64)
|
453 |
/* Same as PPC_LDF_OP but keeps the full 64-bit effective address. */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
459 |
#endif
|
460 |
|
461 |
/* Native-order FP loads: lfd (double) and lfs (single). */
PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfl);
#endif
|
467 |
|
468 |
/* Load a double and byte-reverse its raw 64-bit pattern via a union. */
static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } bits;

    bits.d = glue(ldfq, MEMSUFFIX)(EA);
    bits.u = ((bits.u >> 56) & 0x00000000000000FFULL) |
             ((bits.u >> 40) & 0x000000000000FF00ULL) |
             ((bits.u >> 24) & 0x0000000000FF0000ULL) |
             ((bits.u >>  8) & 0x00000000FF000000ULL) |
             ((bits.u <<  8) & 0x000000FF00000000ULL) |
             ((bits.u << 24) & 0x0000FF0000000000ULL) |
             ((bits.u << 40) & 0x00FF000000000000ULL) |
             ((bits.u << 56) & 0xFF00000000000000ULL);

    return bits.d;
}
487 |
|
488 |
/* Load a float and byte-reverse its raw 32-bit pattern via a union. */
static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } bits;

    bits.f = glue(ldfl, MEMSUFFIX)(EA);
    bits.u = ((bits.u >> 24) & 0x000000FF) |
             ((bits.u >>  8) & 0x0000FF00) |
             ((bits.u <<  8) & 0x00FF0000) |
             ((bits.u << 24) & 0xFF000000);

    return bits.f;
}
503 |
|
504 |
/* Little-endian-mode FP loads via the byte-reversing helpers. */
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldflr);
#endif
|
510 |
|
511 |
/* Load and set reservation */
|
512 |
/* lwarx: load word and establish the reservation address.  A word
 * misaligned EA raises an alignment exception. */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (likely((T0 & 0x03) == 0)) {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    } else {
        do_raise_exception(EXCP_ALIGN);
    }
    RETURN();
}
522 |
|
523 |
#if defined(TARGET_PPC64)
|
524 |
/* lwarx, 64-bit address mode: load word and set the reservation. */
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (likely((T0 & 0x03) == 0)) {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    } else {
        do_raise_exception(EXCP_ALIGN);
    }
    RETURN();
}
534 |
|
535 |
/* ldarx: load doubleword and establish the reservation.
 * BUG FIX: ldarx operates on a doubleword, so the EA must be 8-byte
 * aligned -- check the low three bits (0x07), not just 0x03. */
void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
545 |
#endif
|
546 |
|
547 |
/* lwarx in little-endian mode: byte-reversed load plus reservation. */
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (likely((T0 & 0x03) == 0)) {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    } else {
        do_raise_exception(EXCP_ALIGN);
    }
    RETURN();
}
557 |
|
558 |
#if defined(TARGET_PPC64)
|
559 |
/* lwarx, little-endian, 64-bit address mode. */
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (likely((T0 & 0x03) == 0)) {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    } else {
        do_raise_exception(EXCP_ALIGN);
    }
    RETURN();
}
569 |
|
570 |
/* ldarx, little-endian mode: byte-reversed doubleword load + reservation.
 * BUG FIX: require doubleword alignment (mask 0x07, not 0x03). */
void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
580 |
#endif
|
581 |
|
582 |
/* Store with reservation */
|
583 |
/* stwcx.: store word conditional.  The store succeeds only if the
 * reservation still matches the EA; CR0 is set to xer_ov, with the EQ
 * bit (0x02) added on success.  The reservation is cleared either way. */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else if (likely(regs->reserve == (uint32_t)T0)) {
        glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
        env->crf[0] = xer_ov | 0x02;
    } else {
        env->crf[0] = xer_ov;
    }
    regs->reserve = -1;
    RETURN();
}
598 |
|
599 |
#if defined(TARGET_PPC64)
|
600 |
/* stwcx., 64-bit address mode. */
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else if (likely(regs->reserve == (uint64_t)T0)) {
        glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
        env->crf[0] = xer_ov | 0x02;
    } else {
        env->crf[0] = xer_ov;
    }
    regs->reserve = -1;
    RETURN();
}
615 |
|
616 |
/* stdcx.: doubleword store conditional.
 * BUG FIX: stdcx. requires doubleword alignment, so check the low
 * three bits of the EA (0x07), not just the low two (0x03). */
void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
631 |
#endif
|
632 |
|
633 |
/* stwcx. in little-endian mode: byte-reversed conditional store. */
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else if (likely(regs->reserve == (uint32_t)T0)) {
        glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
        env->crf[0] = xer_ov | 0x02;
    } else {
        env->crf[0] = xer_ov;
    }
    regs->reserve = -1;
    RETURN();
}
648 |
|
649 |
#if defined(TARGET_PPC64)
|
650 |
/* stwcx., little-endian, 64-bit address mode. */
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else if (likely(regs->reserve == (uint64_t)T0)) {
        glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
        env->crf[0] = xer_ov | 0x02;
    } else {
        env->crf[0] = xer_ov;
    }
    regs->reserve = -1;
    RETURN();
}
665 |
|
666 |
/* stdcx. in little-endian mode: byte-reversed conditional store.
 * BUG FIX: require doubleword alignment (mask 0x07, not 0x03). */
void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
681 |
#endif
|
682 |
|
683 |
/* dcbz: zero a data cache line (32 bytes, or 64 when DCACHE_LINE_SIZE
 * is 64) with word stores starting at T0.
 * NOTE(review): the stores assume T0 is already cache-line aligned --
 * the low bits are not masked here; confirm against the translator. */
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
706 |
|
707 |
#if defined(TARGET_PPC64)
|
708 |
/* dcbz, 64-bit address mode: same as op_dcbz but the EA keeps its full
 * 64 bits.  See the alignment note on op_dcbz. */
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
731 |
#endif
|
732 |
|
733 |
/* Instruction cache block invalidate */
|
734 |
/* icbi (instruction cache block invalidate): forwarded to helper. */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
|
747 |
|
748 |
/* External access */
|
749 |
/* External control word load/store (eciwx/ecowx): a plain word load
 * into T1 / store of T1 at the EA in T0.  _le variants byte-reverse,
 * _64 variants use 64-bit address mode. */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
|
804 |
|
805 |
/* XXX: those micro-ops need tests ! */
|
806 |
/* PowerPC 601 specific instructions (POWER bridge) */
|
807 |
/* PowerPC 601 lscbx (load string and compare byte indexed).  T1 holds
 * the byte count; a zero count is a no-op.  PARAM1..PARAM3 are
 * translator-supplied (presumably destination register and compare
 * operands -- confirm against do_POWER_lscbx). */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}
815 |
|
816 |
/* POWER2 quad load and store */
|
817 |
/* XXX: TAGs are not managed */
|
818 |
/* POWER2 quad FP load (lfq): forwarded to helper. */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}
823 |
|
824 |
void glue(op_POWER2_lfq_le, MEMSUFFIX) (void) |
825 |
{ |
826 |
glue(do_POWER2_lfq_le, MEMSUFFIX)(); |
827 |
RETURN(); |
828 |
} |
829 |
|
830 |
/* POWER2 quad FP stores (stfq), native and little-endian modes:
 * forwarded to their helpers. */
void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
841 |
|
842 |
#undef MEMSUFFIX
|