Statistics
| Branch: | Revision:

root / target-ppc / op_mem.h @ c7697e1f

History | View | Annotate | Download (41.5 kB)

1
/*
2
 *  PowerPC emulation micro-operations for qemu.
3
 *
4
 *  Copyright (c) 2003-2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20

    
21
/* Load a 16-bit quantity and byte-swap it (byte-reversed access helper). */
static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t v = glue(lduw, MEMSUFFIX)(EA);
    return (v >> 8) | ((v & 0x00FF) << 8);
}
26

    
27
/* Load a 16-bit quantity, byte-swap it, then sign-extend the result.
 * Fix: the (int16_t) cast must cover the complete swapped value; it
 * previously applied only to the high-byte term, so the byte-reversed
 * halfword was zero-extended instead of sign-extended.
 */
static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}
32

    
33
/* Load a 32-bit quantity and byte-swap it. */
static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t v = glue(ldl, MEMSUFFIX)(EA);
    return (v >> 24) | ((v >> 8) & 0x0000FF00) |
           ((v << 8) & 0x00FF0000) | (v << 24);
}
39

    
40
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
/* Load a 64-bit quantity and byte-swap it.
 * Fix: the low-order byte was shifted left by 54 instead of 56, which
 * corrupted the top bytes of every byte-reversed 64-bit load (compare
 * with the symmetric st64r store helper, which uses 56).
 */
static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
54

    
55
#if defined(TARGET_PPC64)
/* Load a 32-bit quantity and sign-extend it to 64 bits. */
static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

/* Load a 32-bit quantity, byte-swap it, then sign-extend it to 64 bits.
 * Fix: the (int32_t) cast must cover the complete swapped value; it
 * previously applied only to the first OR term, so the byte-reversed
 * word was zero-extended instead of sign-extended.
 */
static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
68

    
69
static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
70
                                                  uint16_t data)
71
{
72
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
73
    glue(stw, MEMSUFFIX)(EA, tmp);
74
}
75

    
76
static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
77
                                                  uint32_t data)
78
{
79
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
80
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
81
    glue(stl, MEMSUFFIX)(EA, tmp);
82
}
83

    
84
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
/* Byte-swap a 64-bit quantity and store it. */
static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    uint64_t swapped =
        ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, swapped);
}
#endif
99

    
100
/***                             Integer load                              ***/
/* Generate a micro-op that loads into T1 from the 32-bit effective
 * address held in T0, using accessor 'op' for the current MMU mode.
 */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}
107

    
108
#if defined(TARGET_PPC64)
/* Same as PPC_LD_OP, but with a full 64-bit effective address. */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif
116

    
117
/* Generate a micro-op that stores T1 at the 32-bit effective address
 * held in T0, using accessor 'op' for the current MMU mode.
 */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}
123

    
124
#if defined(TARGET_PPC64)
/* Same as PPC_ST_OP, but with a full 64-bit effective address. */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
132

    
133
/* Native-order (big-endian mode) integer loads */
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

/* Little-endian mode integer loads: byte-reversed accessors */
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
160

    
161
/***                              Integer store                            ***/
/* Native-order (big-endian mode) integer stores */
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

/* Little-endian mode integer stores: byte-reversed accessors */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
181

    
182
/***                Integer load and store with byte reverse               ***/
/* lhbrx/lwbrx/sthbrx/stwbrx are reversed relative to the current mode:
 * swapped accessors in big-endian mode, native accessors in the _le
 * (little-endian mode) variants below.
 */
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

/* In little-endian mode, a byte-reversed access is a native access */
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
204

    
205
/***                    Integer load and store multiple                    ***/
206
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
207
{
208
    glue(do_lmw, MEMSUFFIX)(PARAM1);
209
    RETURN();
210
}
211

    
212
#if defined(TARGET_PPC64)
213
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
214
{
215
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
216
    RETURN();
217
}
218
#endif
219

    
220
void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
221
{
222
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
223
    RETURN();
224
}
225

    
226
#if defined(TARGET_PPC64)
227
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
228
{
229
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
230
    RETURN();
231
}
232
#endif
233

    
234
void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
235
{
236
    glue(do_stmw, MEMSUFFIX)(PARAM1);
237
    RETURN();
238
}
239

    
240
#if defined(TARGET_PPC64)
241
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
242
{
243
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
244
    RETURN();
245
}
246
#endif
247

    
248
void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
249
{
250
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
251
    RETURN();
252
}
253

    
254
#if defined(TARGET_PPC64)
255
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
256
{
257
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
258
    RETURN();
259
}
260
#endif
261

    
262
/***                    Integer load and store strings                     ***/
263
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
264
{
265
    glue(do_lsw, MEMSUFFIX)(PARAM1);
266
    RETURN();
267
}
268

    
269
#if defined(TARGET_PPC64)
270
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
271
{
272
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
273
    RETURN();
274
}
275
#endif
276

    
277
void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
278
{
279
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
280
    RETURN();
281
}
282

    
283
#if defined(TARGET_PPC64)
284
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
285
{
286
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
287
    RETURN();
288
}
289
#endif
290

    
291
/* PPC32 specification says we must generate an exception if
292
 * rA is in the range of registers to be loaded.
293
 * In an other hand, IBM says this is valid, but rA won't be loaded.
294
 * For now, I'll follow the spec...
295
 */
296
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
297
{
298
    /* Note: T1 comes from xer_bc then no cast is needed */
299
    if (likely(T1 != 0)) {
300
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
301
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
302
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
303
                                   POWERPC_EXCP_INVAL |
304
                                   POWERPC_EXCP_INVAL_LSWX);
305
        } else {
306
            glue(do_lsw, MEMSUFFIX)(PARAM1);
307
        }
308
    }
309
    RETURN();
310
}
311

    
312
#if defined(TARGET_PPC64)
313
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
314
{
315
    /* Note: T1 comes from xer_bc then no cast is needed */
316
    if (likely(T1 != 0)) {
317
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
318
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
319
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
320
                                   POWERPC_EXCP_INVAL |
321
                                   POWERPC_EXCP_INVAL_LSWX);
322
        } else {
323
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
324
        }
325
    }
326
    RETURN();
327
}
328
#endif
329

    
330
void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
331
{
332
    /* Note: T1 comes from xer_bc then no cast is needed */
333
    if (likely(T1 != 0)) {
334
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
335
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
336
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
337
                                   POWERPC_EXCP_INVAL |
338
                                   POWERPC_EXCP_INVAL_LSWX);
339
        } else {
340
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
341
        }
342
    }
343
    RETURN();
344
}
345

    
346
#if defined(TARGET_PPC64)
347
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
348
{
349
    /* Note: T1 comes from xer_bc then no cast is needed */
350
    if (likely(T1 != 0)) {
351
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
352
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
353
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
354
                                   POWERPC_EXCP_INVAL |
355
                                   POWERPC_EXCP_INVAL_LSWX);
356
        } else {
357
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
358
        }
359
    }
360
    RETURN();
361
}
362
#endif
363

    
364
/* stswi/stswx: string stores, handled entirely by the do_stsw* helpers;
 * PARAM1 is the first source register.
 */
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
391

    
392
/***                         Floating-point store                          ***/
/* Generate a micro-op that stores FT0 at the 32-bit address in T0. */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_STF_OP, but with a full 64-bit effective address. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif
408

    
409
/* Convert to single precision, then store the 32-bit float. */
static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}
413

    
414
/* stfiwx: store the low 32 bits of the raw FP register image,
 * without any floating-point conversion.
 */
static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } pun;

    pun.d = d;
    /* stl truncates to the low-order 32 bits */
    glue(stl, MEMSUFFIX)(EA, pun.u);
}
425

    
426
/* Native-order floating-point stores */
PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif
434

    
435
/* Byte-swap the 64-bit image of a double and store it. */
static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } pun;

    pun.d = d;
    pun.u = ((pun.u & 0xFF00000000000000ULL) >> 56) |
        ((pun.u & 0x00FF000000000000ULL) >> 40) |
        ((pun.u & 0x0000FF0000000000ULL) >> 24) |
        ((pun.u & 0x000000FF00000000ULL) >> 8) |
        ((pun.u & 0x00000000FF000000ULL) << 8) |
        ((pun.u & 0x0000000000FF0000ULL) << 24) |
        ((pun.u & 0x000000000000FF00ULL) << 40) |
        ((pun.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, pun.d);
}
453

    
454
/* Convert to single precision, byte-swap the 32-bit image and store it.
 * (Mask suffixes normalized: all fit in 32 bits, so plain UL is enough.)
 */
static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } pun;

    pun.f = float64_to_float32(d, &env->fp_status);
    pun.u = ((pun.u & 0xFF000000UL) >> 24) |
        ((pun.u & 0x00FF0000UL) >> 8) |
        ((pun.u & 0x0000FF00UL) << 8) |
        ((pun.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, pun.f);
}
468

    
469
/* Byte-reversed stfiwx: store the low 32 bits of the raw FP image,
 * byte-swapped, without any floating-point conversion.
 */
static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } pun;

    pun.d = d;
    pun.u = ((pun.u & 0xFF000000UL) >> 24) |
        ((pun.u & 0x00FF0000UL) >> 8) |
        ((pun.u & 0x0000FF00UL) << 8) |
        ((pun.u & 0x000000FFUL) << 24);
    glue(stl, MEMSUFFIX)(EA, pun.u);
}
484

    
485
/* Little-endian mode floating-point stores */
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif
493

    
494
/***                         Floating-point load                           ***/
/* Generate a micro-op that loads into FT0 from the 32-bit address in T0. */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_LDF_OP, but with a full 64-bit effective address. */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif
510

    
511
/* Load a single-precision float and widen it to double precision. */
static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}
515

    
516
/* Native-order floating-point loads */
PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif
522

    
523
/* Load a 64-bit double image and byte-swap it. */
static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } pun;

    pun.d = glue(ldfq, MEMSUFFIX)(EA);
    pun.u = ((pun.u & 0xFF00000000000000ULL) >> 56) |
        ((pun.u & 0x00FF000000000000ULL) >> 40) |
        ((pun.u & 0x0000FF0000000000ULL) >> 24) |
        ((pun.u & 0x000000FF00000000ULL) >> 8) |
        ((pun.u & 0x00000000FF000000ULL) << 8) |
        ((pun.u & 0x0000000000FF0000ULL) << 24) |
        ((pun.u & 0x000000000000FF00ULL) << 40) |
        ((pun.u & 0x00000000000000FFULL) << 56);

    return pun.d;
}
542

    
543
/* Load a 32-bit float image, byte-swap it, then widen to double. */
static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } pun;

    pun.f = glue(ldfl, MEMSUFFIX)(EA);
    pun.u = ((pun.u & 0xFF000000UL) >> 24) |
        ((pun.u & 0x00FF0000UL) >> 8) |
        ((pun.u & 0x0000FF00UL) << 8) |
        ((pun.u & 0x000000FFUL) << 24);

    return float32_to_float64(pun.f, &env->fp_status);
}
558

    
559
/* Little-endian mode floating-point loads */
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
565

    
566
/* Load and set reservation */
567
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
568
{
569
    if (unlikely(T0 & 0x03)) {
570
        do_raise_exception(POWERPC_EXCP_ALIGN);
571
    } else {
572
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
573
        env->reserve = (uint32_t)T0;
574
    }
575
    RETURN();
576
}
577

    
578
#if defined(TARGET_PPC64)
579
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
580
{
581
    if (unlikely(T0 & 0x03)) {
582
        do_raise_exception(POWERPC_EXCP_ALIGN);
583
    } else {
584
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
585
        env->reserve = (uint64_t)T0;
586
    }
587
    RETURN();
588
}
589

    
590
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
591
{
592
    if (unlikely(T0 & 0x03)) {
593
        do_raise_exception(POWERPC_EXCP_ALIGN);
594
    } else {
595
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
596
        env->reserve = (uint32_t)T0;
597
    }
598
    RETURN();
599
}
600

    
601
void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
602
{
603
    if (unlikely(T0 & 0x03)) {
604
        do_raise_exception(POWERPC_EXCP_ALIGN);
605
    } else {
606
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
607
        env->reserve = (uint64_t)T0;
608
    }
609
    RETURN();
610
}
611
#endif
612

    
613
/* Little-endian mode reserved loads: byte-reversed accessors */
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
658

    
659
/* Store with reservation */
660
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
661
{
662
    if (unlikely(T0 & 0x03)) {
663
        do_raise_exception(POWERPC_EXCP_ALIGN);
664
    } else {
665
        if (unlikely(env->reserve != (uint32_t)T0)) {
666
            env->crf[0] = xer_so;
667
        } else {
668
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
669
            env->crf[0] = xer_so | 0x02;
670
        }
671
    }
672
    env->reserve = -1;
673
    RETURN();
674
}
675

    
676
#if defined(TARGET_PPC64)
677
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
678
{
679
    if (unlikely(T0 & 0x03)) {
680
        do_raise_exception(POWERPC_EXCP_ALIGN);
681
    } else {
682
        if (unlikely(env->reserve != (uint64_t)T0)) {
683
            env->crf[0] = xer_so;
684
        } else {
685
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
686
            env->crf[0] = xer_so | 0x02;
687
        }
688
    }
689
    env->reserve = -1;
690
    RETURN();
691
}
692

    
693
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
694
{
695
    if (unlikely(T0 & 0x03)) {
696
        do_raise_exception(POWERPC_EXCP_ALIGN);
697
    } else {
698
        if (unlikely(env->reserve != (uint32_t)T0)) {
699
            env->crf[0] = xer_so;
700
        } else {
701
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
702
            env->crf[0] = xer_so | 0x02;
703
        }
704
    }
705
    env->reserve = -1;
706
    RETURN();
707
}
708

    
709
void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
710
{
711
    if (unlikely(T0 & 0x03)) {
712
        do_raise_exception(POWERPC_EXCP_ALIGN);
713
    } else {
714
        if (unlikely(env->reserve != (uint64_t)T0)) {
715
            env->crf[0] = xer_so;
716
        } else {
717
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
718
            env->crf[0] = xer_so | 0x02;
719
        }
720
    }
721
    env->reserve = -1;
722
    RETURN();
723
}
724
#endif
725

    
726
/* Little-endian mode conditional stores: byte-reversed accessors */
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif
791

    
792
/* dcbz: zero one data cache line at the address in T0, one unrolled
 * variant per supported cache-line size (32/64/128 bytes); op_dcbz
 * goes through a helper when the size is only known at run time.
 * (Offset suffixes normalized; the values are identical.)
 */
void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x40), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x44), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x48), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x4C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x50), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x54), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x58), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x5C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x60), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x64), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x68), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x6C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x70), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x74), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x78), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x7C), 0);
    RETURN();
}

/* Cache-line size selected at run time: dispatch through the helper */
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}
868

    
869
#if defined(TARGET_PPC64)
870
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
871
{
872
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
873
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
874
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
875
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
876
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
877
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
878
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
879
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
880
    RETURN();
881
}
882

    
883
void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
884
{
885
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
886
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
887
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
888
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
889
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
890
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
891
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
892
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
893
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
894
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
895
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
896
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
897
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
898
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
899
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
900
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
901
    RETURN();
902
}
903

    
904
/* dcbz, 64-bit address mode, 128-byte cache line. */
void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    uint64_t addr = (uint64_t)T0;

    glue(stl, MEMSUFFIX)(addr + 0x00, 0);
    glue(stl, MEMSUFFIX)(addr + 0x04, 0);
    glue(stl, MEMSUFFIX)(addr + 0x08, 0);
    glue(stl, MEMSUFFIX)(addr + 0x0C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x10, 0);
    glue(stl, MEMSUFFIX)(addr + 0x14, 0);
    glue(stl, MEMSUFFIX)(addr + 0x18, 0);
    glue(stl, MEMSUFFIX)(addr + 0x1C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x20, 0);
    glue(stl, MEMSUFFIX)(addr + 0x24, 0);
    glue(stl, MEMSUFFIX)(addr + 0x28, 0);
    glue(stl, MEMSUFFIX)(addr + 0x2C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x30, 0);
    glue(stl, MEMSUFFIX)(addr + 0x34, 0);
    glue(stl, MEMSUFFIX)(addr + 0x38, 0);
    glue(stl, MEMSUFFIX)(addr + 0x3C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x40, 0);
    glue(stl, MEMSUFFIX)(addr + 0x44, 0);
    glue(stl, MEMSUFFIX)(addr + 0x48, 0);
    glue(stl, MEMSUFFIX)(addr + 0x4C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x50, 0);
    glue(stl, MEMSUFFIX)(addr + 0x54, 0);
    glue(stl, MEMSUFFIX)(addr + 0x58, 0);
    glue(stl, MEMSUFFIX)(addr + 0x5C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x60, 0);
    glue(stl, MEMSUFFIX)(addr + 0x64, 0);
    glue(stl, MEMSUFFIX)(addr + 0x68, 0);
    glue(stl, MEMSUFFIX)(addr + 0x6C, 0);
    glue(stl, MEMSUFFIX)(addr + 0x70, 0);
    glue(stl, MEMSUFFIX)(addr + 0x74, 0);
    glue(stl, MEMSUFFIX)(addr + 0x78, 0);
    glue(stl, MEMSUFFIX)(addr + 0x7C, 0);
    RETURN();
}
940

    
941
/* dcbz, 64-bit address mode: line size resolved at run time by the
 * helper.
 */
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif
947

    
948
/* Instruction cache block invalidate */
949
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
950
{
951
    glue(do_icbi, MEMSUFFIX)();
952
    RETURN();
953
}
954

    
955
#if defined(TARGET_PPC64)
/* Instruction cache block invalidate (64-bit address mode) */
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
962

    
963
/* External access */
964
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
965
{
966
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
967
    RETURN();
968
}
969

    
970
#if defined(TARGET_PPC64)
971
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
972
{
973
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
974
    RETURN();
975
}
976
#endif
977

    
978
void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
979
{
980
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
981
    RETURN();
982
}
983

    
984
#if defined(TARGET_PPC64)
985
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
986
{
987
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
988
    RETURN();
989
}
990
#endif
991

    
992
/* Byte-reversed (little-endian) variants of eciwx/ecowx. */
void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
1019

    
1020
/* XXX: those micro-ops need tests ! */
1021
/* PowerPC 601 specific instructions (POWER bridge) */
1022
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
1023
{
1024
    /* When byte count is 0, do nothing */
1025
    if (likely(T1 != 0)) {
1026
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
1027
    }
1028
    RETURN();
1029
}
1030

    
1031
/* POWER2 quad load and store */
1032
/* XXX: TAGs are not managed */
1033
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
1034
{
1035
    glue(do_POWER2_lfq, MEMSUFFIX)();
1036
    RETURN();
1037
}
1038

    
1039
void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
1040
{
1041
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
1042
    RETURN();
1043
}
1044

    
1045
/* POWER2 quad store (native and byte-reversed). */
void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
1056

    
1057
/* Altivec vector extension */
1058
#if defined(WORDS_BIGENDIAN)
1059
#define VR_DWORD0 0
1060
#define VR_DWORD1 1
1061
#else
1062
#define VR_DWORD0 1
1063
#define VR_DWORD1 0
1064
#endif
1065
void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
1066
{
1067
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
1068
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
1069
}
1070

    
1071
void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
1072
{
1073
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
1074
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
1075
}
1076

    
1077
void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
1078
{
1079
    glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
1080
    glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
1081
}
1082

    
1083
void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
1084
{
1085
    glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
1086
    glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
1087
}
1088

    
1089
#if defined(TARGET_PPC64)
/* 64-bit address mode Altivec load/store.
 * Fix: RETURN() was missing from these ops, unlike every other
 * micro-op in this file; added for dyngen consistency.
 */
void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
    RETURN();
}

void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
    RETURN();
}

void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
    glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
    RETURN();
}

void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
    glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
    RETURN();
}
#endif
#undef VR_DWORD0
#undef VR_DWORD1
1116

    
1117
#if defined(TARGET_PPCEMB)
/* SPE extension */
/* Generator for SPE load micro-ops: effective address in T0
 * (truncated to 32 bits in 32-bit mode), 64-bit result in T1_64.
 */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
/* On 64-bit targets each load op comes in a 32-bit and a 64-bit
 * address-mode flavour.
 */
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif
1140

    
1141
/* Generator for SPE store micro-ops: effective address in T0,
 * 64-bit source value in T1_64.
 */
#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
1162

    
1163
#if !defined(TARGET_PPC64)
/* Whole 64-bit load/store, native and byte-reversed order. */
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
1169
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1170
{
1171
    uint64_t ret;
1172
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
1173
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
1174
    return ret;
1175
}
1176
PPC_SPE_LD_OP(dw, spe_ldw);
1177
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
1178
                                                     uint64_t data)
1179
{
1180
    glue(stl, MEMSUFFIX)(EA, data >> 32);
1181
    glue(stl, MEMSUFFIX)(EA + 4, data);
1182
}
1183
PPC_SPE_ST_OP(dw, spe_stdw);
1184
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1185
{
1186
    uint64_t ret;
1187
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
1188
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
1189
    return ret;
1190
}
1191
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
1192
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1193
                                                        uint64_t data)
1194
{
1195
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
1196
    glue(st32r, MEMSUFFIX)(EA + 4, data);
1197
}
1198
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
1199
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1200
{
1201
    uint64_t ret;
1202
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1203
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
1204
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
1205
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
1206
    return ret;
1207
}
1208
PPC_SPE_LD_OP(dh, spe_ldh);
1209
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
1210
                                                     uint64_t data)
1211
{
1212
    glue(stw, MEMSUFFIX)(EA, data >> 48);
1213
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
1214
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
1215
    glue(stw, MEMSUFFIX)(EA + 6, data);
1216
}
1217
PPC_SPE_ST_OP(dh, spe_stdh);
1218
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1219
{
1220
    uint64_t ret;
1221
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1222
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
1223
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
1224
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
1225
    return ret;
1226
}
1227
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1228
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1229
                                                        uint64_t data)
1230
{
1231
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
1232
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1233
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1234
    glue(st16r, MEMSUFFIX)(EA + 6, data);
1235
}
1236
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1237
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1238
{
1239
    uint64_t ret;
1240
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1241
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
1242
    return ret;
1243
}
1244
PPC_SPE_LD_OP(whe, spe_lwhe);
1245
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
1246
                                                      uint64_t data)
1247
{
1248
    glue(stw, MEMSUFFIX)(EA, data >> 48);
1249
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
1250
}
1251
PPC_SPE_ST_OP(whe, spe_stwhe);
1252
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1253
{
1254
    uint64_t ret;
1255
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1256
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
1257
    return ret;
1258
}
1259
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1260
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1261
                                                         uint64_t data)
1262
{
1263
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
1264
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1265
}
1266
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1267
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1268
{
1269
    uint64_t ret;
1270
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
1271
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
1272
    return ret;
1273
}
1274
PPC_SPE_LD_OP(whou, spe_lwhou);
1275
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1276
{
1277
    uint64_t ret;
1278
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
1279
    ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
1280
    return ret;
1281
}
1282
PPC_SPE_LD_OP(whos, spe_lwhos);
1283
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
1284
                                                      uint64_t data)
1285
{
1286
    glue(stw, MEMSUFFIX)(EA, data >> 32);
1287
    glue(stw, MEMSUFFIX)(EA + 2, data);
1288
}
1289
PPC_SPE_ST_OP(who, spe_stwho);
1290
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1291
{
1292
    uint64_t ret;
1293
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
1294
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
1295
    return ret;
1296
}
1297
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1298
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1299
{
1300
    uint64_t ret;
1301
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
1302
    ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
1303
    return ret;
1304
}
1305
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1306
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1307
                                                         uint64_t data)
1308
{
1309
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
1310
    glue(st16r, MEMSUFFIX)(EA + 2, data);
1311
}
1312
PPC_SPE_ST_OP(who_le, spe_stwho_le);
1313
#if !defined(TARGET_PPC64)
1314
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
1315
                                                      uint64_t data)
1316
{
1317
    glue(stl, MEMSUFFIX)(EA, data);
1318
}
1319
PPC_SPE_ST_OP(wwo, spe_stwwo);
1320
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1321
                                                         uint64_t data)
1322
{
1323
    glue(st32r, MEMSUFFIX)(EA, data);
1324
}
1325
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1326
#endif
1327
/* Load one halfword and replicate it into bits 63:48 and 31:16. */
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t half = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)half << 48) | ((uint64_t)half << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t half = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)half << 48) | ((uint64_t)half << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
/* Load one word and replicate it into both 32-bit halves. */
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t word = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)word << 32) | (uint64_t)word;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static always_inline
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t word = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)word << 32) | (uint64_t)word;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1356
/* Load two halfwords; replicate each within its own 32-bit word. */
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t acc;
    uint16_t half;
    half = glue(lduw, MEMSUFFIX)(EA);
    acc = ((uint64_t)half << 48) | ((uint64_t)half << 32);
    half = glue(lduw, MEMSUFFIX)(EA + 2);
    acc |= ((uint64_t)half << 16) | (uint64_t)half;
    return acc;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
/* Byte-reversed variant. */
static always_inline
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t acc;
    uint16_t half;
    half = glue(ld16r, MEMSUFFIX)(EA);
    acc = ((uint64_t)half << 48) | ((uint64_t)half << 32);
    half = glue(ld16r, MEMSUFFIX)(EA + 2);
    acc |= ((uint64_t)half << 16) | (uint64_t)half;
    return acc;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1379
#endif /* defined(TARGET_PPCEMB) */
1380

    
1381
#undef MEMSUFFIX