/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
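
/* This header is included several times by the micro-op source file, once per
 * memory access mode, with MEMSUFFIX set to a different suffix each time
 * (e.g. _raw for direct access, _user/_kernel/... when the softmmu is used).
 * glue() simply pastes tokens together, so glue(lduw, MEMSUFFIX)(EA) expands
 * to lduw_raw(EA), lduw_user(EA), and so on.  The helpers below byte-swap
 * 16/32/64-bit values around those accessors; they are used to implement the
 * little-endian and byte-reverse variants of the ops.
 */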

static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Byte-swap first, then sign-extend the 16-bit result */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}

static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif

#if defined(TARGET_PPC64)
static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Byte-swap first, then sign-extend the 32-bit result */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif

static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
                                                  uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
                                                  uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif

/***                             Integer load                              ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
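
/* T0 and T1 are the fixed temporaries of the dyngen micro-op model: the
 * translated code computes the effective address into T0, loads return the
 * value in T1, and stores take it from T1.  The (uint32_t)/(uint64_t) casts
 * on T0 select 32-bit or 64-bit effective-address truncation; the _64
 * variants are used when the CPU runs in 64-bit mode.  Each macro invocation
 * below expands to one micro-op per access size, e.g. PPC_LD_OP(bz, ldub)
 * defines glue(op_lbz, MEMSUFFIX) (op_lbz_raw and friends), the access
 * behind the lbz instruction.
 */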

PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/***                              Integer store                            ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/***                Integer load and store with byte reverse               ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif

/***                    Integer load and store multiple                    ***/
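/* lmw/stmw move general registers rD..r31 to or from consecutive words in
 * memory.  The loop itself lives in the do_lmw and do_stmw helpers
 * (presumably in op_helper.c in this tree); PARAM1 is the index of the
 * first register, as emitted by the translator.  The _le variants
 * byte-swap each word.
 */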

void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/***                    Integer load and store strings                     ***/
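/* lswi/stswi move a byte string between memory and successive GPRs using an
 * immediate length; lswx/stswx take the length from the byte count field of
 * XER (xer_bc), which the translator has already copied into T1 for the
 * checks below.  PARAM1 is the first target register; for lswx, PARAM2 and
 * PARAM3 are presumably the rA and rB indexes used to detect the forbidden
 * register overlap.
 */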

void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/* PPC32 specification says we must generate an exception if
292
 * rA is in the range of registers to be loaded.
293
 * In an other hand, IBM says this is valid, but rA won't be loaded.
294
 * For now, I'll follow the spec...
295
 */
296
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
297
{
298
    /* Note: T1 comes from xer_bc then no cast is needed */
299
    if (likely(T1 != 0)) {
300
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
301
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
302
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
303
                                   POWERPC_EXCP_INVAL |
304
                                   POWERPC_EXCP_INVAL_LSWX);
305
        } else {
306
            glue(do_lsw, MEMSUFFIX)(PARAM1);
307
        }
308
    }
309
    RETURN();
310
}
311

    
312
#if defined(TARGET_PPC64)
313
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
314
{
315
    /* Note: T1 comes from xer_bc then no cast is needed */
316
    if (likely(T1 != 0)) {
317
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
318
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
319
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
320
                                   POWERPC_EXCP_INVAL |
321
                                   POWERPC_EXCP_INVAL_LSWX);
322
        } else {
323
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
324
        }
325
    }
326
    RETURN();
327
}
328
#endif
329

    
330
void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
331
{
332
    /* Note: T1 comes from xer_bc then no cast is needed */
333
    if (likely(T1 != 0)) {
334
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
335
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
336
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
337
                                   POWERPC_EXCP_INVAL |
338
                                   POWERPC_EXCP_INVAL_LSWX);
339
        } else {
340
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
341
        }
342
    }
343
    RETURN();
344
}
345

    
346
#if defined(TARGET_PPC64)
347
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
348
{
349
    /* Note: T1 comes from xer_bc then no cast is needed */
350
    if (likely(T1 != 0)) {
351
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
352
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
353
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
354
                                   POWERPC_EXCP_INVAL |
355
                                   POWERPC_EXCP_INVAL_LSWX);
356
        } else {
357
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
358
        }
359
    }
360
    RETURN();
361
}
362
#endif
363

    
364
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
365
{
366
    glue(do_stsw, MEMSUFFIX)(PARAM1);
367
    RETURN();
368
}
369

    
370
#if defined(TARGET_PPC64)
371
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
372
{
373
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
374
    RETURN();
375
}
376
#endif
377

    
378
void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
379
{
380
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
381
    RETURN();
382
}
383

    
384
#if defined(TARGET_PPC64)
385
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
386
{
387
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
388
    RETURN();
389
}
390
#endif
391

    

/***                         Floating-point store                          ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

#if defined(WORDS_BIGENDIAN)
#define WORD0 0
#define WORD1 1
#else
#define WORD0 1
#define WORD1 0
#endif
static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint32_t u[2];
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(stl, MEMSUFFIX)(EA, u.u[WORD0]);
}
#undef WORD0
#undef WORD1

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif

static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);
    glue(stl, MEMSUFFIX)(EA, u.u);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif

/***                         Floating-point load                           ***/
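/* FPRs are always kept in double-precision format (FT0 here), so the
 * single-precision forms convert on the way through: ldfs widens the 32-bit
 * value with float32_to_float64 and stfs narrows it with float64_to_float32,
 * both using env->fp_status for rounding and exception accumulation.  The
 * *r helpers additionally byte-swap the raw bit image for the little-endian
 * variants.
 */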

#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif

static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);

    return float32_to_float64(u.f, &env->fp_status);
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif

/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
577
{
578
    if (unlikely(T0 & 0x03)) {
579
        do_raise_exception(POWERPC_EXCP_ALIGN);
580
    } else {
581
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
582
        env->reserve = (uint32_t)T0;
583
    }
584
    RETURN();
585
}
586

    
587
#if defined(TARGET_PPC64)
588
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
589
{
590
    if (unlikely(T0 & 0x03)) {
591
        do_raise_exception(POWERPC_EXCP_ALIGN);
592
    } else {
593
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
594
        env->reserve = (uint64_t)T0;
595
    }
596
    RETURN();
597
}
598

    
599
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
600
{
601
    if (unlikely(T0 & 0x03)) {
602
        do_raise_exception(POWERPC_EXCP_ALIGN);
603
    } else {
604
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
605
        env->reserve = (uint32_t)T0;
606
    }
607
    RETURN();
608
}
609

    
610
void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
611
{
612
    if (unlikely(T0 & 0x03)) {
613
        do_raise_exception(POWERPC_EXCP_ALIGN);
614
    } else {
615
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
616
        env->reserve = (uint64_t)T0;
617
    }
618
    RETURN();
619
}
620
#endif
621

    
622
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
623
{
624
    if (unlikely(T0 & 0x03)) {
625
        do_raise_exception(POWERPC_EXCP_ALIGN);
626
    } else {
627
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
628
        env->reserve = (uint32_t)T0;
629
    }
630
    RETURN();
631
}
632

    
633
#if defined(TARGET_PPC64)
634
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
635
{
636
    if (unlikely(T0 & 0x03)) {
637
        do_raise_exception(POWERPC_EXCP_ALIGN);
638
    } else {
639
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
640
        env->reserve = (uint64_t)T0;
641
    }
642
    RETURN();
643
}
644

    
645
void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
646
{
647
    if (unlikely(T0 & 0x03)) {
648
        do_raise_exception(POWERPC_EXCP_ALIGN);
649
    } else {
650
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
651
        env->reserve = (uint32_t)T0;
652
    }
653
    RETURN();
654
}
655

    
656
void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
657
{
658
    if (unlikely(T0 & 0x03)) {
659
        do_raise_exception(POWERPC_EXCP_ALIGN);
660
    } else {
661
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
662
        env->reserve = (uint64_t)T0;
663
    }
664
    RETURN();
665
}
666
#endif
667

    
668
/* Store with reservation */
669
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
670
{
671
    if (unlikely(T0 & 0x03)) {
672
        do_raise_exception(POWERPC_EXCP_ALIGN);
673
    } else {
674
        if (unlikely(env->reserve != (uint32_t)T0)) {
675
            env->crf[0] = xer_so;
676
        } else {
677
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
678
            env->crf[0] = xer_so | 0x02;
679
        }
680
    }
681
    env->reserve = (target_ulong)-1ULL;
682
    RETURN();
683
}
684

    
685
#if defined(TARGET_PPC64)
686
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
687
{
688
    if (unlikely(T0 & 0x03)) {
689
        do_raise_exception(POWERPC_EXCP_ALIGN);
690
    } else {
691
        if (unlikely(env->reserve != (uint64_t)T0)) {
692
            env->crf[0] = xer_so;
693
        } else {
694
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
695
            env->crf[0] = xer_so | 0x02;
696
        }
697
    }
698
    env->reserve = (target_ulong)-1ULL;
699
    RETURN();
700
}
701

    
702
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
703
{
704
    if (unlikely(T0 & 0x03)) {
705
        do_raise_exception(POWERPC_EXCP_ALIGN);
706
    } else {
707
        if (unlikely(env->reserve != (uint32_t)T0)) {
708
            env->crf[0] = xer_so;
709
        } else {
710
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
711
            env->crf[0] = xer_so | 0x02;
712
        }
713
    }
714
    env->reserve = (target_ulong)-1ULL;
715
    RETURN();
716
}
717

    
718
void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
719
{
720
    if (unlikely(T0 & 0x03)) {
721
        do_raise_exception(POWERPC_EXCP_ALIGN);
722
    } else {
723
        if (unlikely(env->reserve != (uint64_t)T0)) {
724
            env->crf[0] = xer_so;
725
        } else {
726
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
727
            env->crf[0] = xer_so | 0x02;
728
        }
729
    }
730
    env->reserve = (target_ulong)-1ULL;
731
    RETURN();
732
}
733
#endif
734

    
735
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
736
{
737
    if (unlikely(T0 & 0x03)) {
738
        do_raise_exception(POWERPC_EXCP_ALIGN);
739
    } else {
740
        if (unlikely(env->reserve != (uint32_t)T0)) {
741
            env->crf[0] = xer_so;
742
        } else {
743
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
744
            env->crf[0] = xer_so | 0x02;
745
        }
746
    }
747
    env->reserve = (target_ulong)-1ULL;
748
    RETURN();
749
}
750

    
751
#if defined(TARGET_PPC64)
752
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
753
{
754
    if (unlikely(T0 & 0x03)) {
755
        do_raise_exception(POWERPC_EXCP_ALIGN);
756
    } else {
757
        if (unlikely(env->reserve != (uint64_t)T0)) {
758
            env->crf[0] = xer_so;
759
        } else {
760
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
761
            env->crf[0] = xer_so | 0x02;
762
        }
763
    }
764
    env->reserve = (target_ulong)-1ULL;
765
    RETURN();
766
}
767

    
768
void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
769
{
770
    if (unlikely(T0 & 0x03)) {
771
        do_raise_exception(POWERPC_EXCP_ALIGN);
772
    } else {
773
        if (unlikely(env->reserve != (uint32_t)T0)) {
774
            env->crf[0] = xer_so;
775
        } else {
776
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
777
            env->crf[0] = xer_so | 0x02;
778
        }
779
    }
780
    env->reserve = (target_ulong)-1ULL;
781
    RETURN();
782
}
783

    
784
void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
785
{
786
    if (unlikely(T0 & 0x03)) {
787
        do_raise_exception(POWERPC_EXCP_ALIGN);
788
    } else {
789
        if (unlikely(env->reserve != (uint64_t)T0)) {
790
            env->crf[0] = xer_so;
791
        } else {
792
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
793
            env->crf[0] = xer_so | 0x02;
794
        }
795
    }
796
    env->reserve = (target_ulong)-1ULL;
797
    RETURN();
798
}
799
#endif
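
/* dcbz (data cache block clear to zero) zeroes one cache block.  The block
 * size depends on the CPU model, so there is one op per supported size
 * (32, 64 or 128 bytes), each implemented as a run of 32-bit zero stores,
 * plus a generic op_dcbz that defers to the do_dcbz helper to handle the
 * CPU's configured line size at run time.
 */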

void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* External access */
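/* eciwx/ecowx (external control in/out word indexed) are implemented here as
 * plain word accesses; the EAR-based device selection and the associated
 * permission checks are not modelled at this level.
 */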

void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

/* XXX: those micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When the byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}

/* Altivec vector extension */
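/* lvx/stvx move a full 128-bit vector register (AVR0 here) as two 64-bit
 * halves.  VR_DWORD0/VR_DWORD1 map the architectural big-endian doubleword
 * order onto the host layout of the u64[] array, and the _le variants swap
 * the two halves for little-endian mode.
 */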

#if defined(WORDS_BIGENDIAN)
#define VR_DWORD0 0
#define VR_DWORD1 1
#else
#define VR_DWORD0 1
#define VR_DWORD1 0
#endif
void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
}

void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
}

void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
    glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
}

void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
    glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
}

void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
}

void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
    glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
}

void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
    glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
}
#endif
#undef VR_DWORD0
#undef VR_DWORD1

#if defined(TARGET_PPCEMB)
/* SPE extension */
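/* The SPE extension views each 64-bit GPR as a vector of two 32-bit
 * elements.  The ops below load or store such a 64-bit value through T1_64,
 * assembling it from words, halfwords or splatted elements as the ev* load
 * and store instructions require; the _le variants byte-swap each element.
 * PPC_SPE_LD_OP/PPC_SPE_ST_OP also emit a _64 form of every op when the
 * target is 64-bit capable.
 */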

#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif

#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    /* mask to 32 bits: avoid sign bits spilling into the high element */
    ret |= (uint64_t)((uint32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    /* mask to 32 bits: avoid sign bits spilling into the high element */
    ret |= (uint64_t)((uint32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static always_inline
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static always_inline
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
#endif /* defined(TARGET_PPCEMB) */

#undef MEMSUFFIX