Statistics
| Branch: | Revision:

root / target-ppc / op_mem.h @ b068d6a7

History | View | Annotate | Download (39.9 kB)

/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
20

    
21
/* Load a 16-bit value and return it with its two bytes swapped. */
static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t v = glue(lduw, MEMSUFFIX)(EA);

    return (v >> 8) | ((v & 0x00FF) << 8);
}
26

    
27
/*
 * Load a 16-bit value, byte-swap it, and sign-extend the result.
 * Fix: the (int16_t) cast must cover the complete swapped value.
 * Previously it bound only to the first OR term, so a swapped value
 * with bit 15 set (e.g. 0x8000) was returned zero-extended instead
 * of sign-extended.
 */
static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);

    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}
32

    
33
/* Load a 32-bit value and return it with its four bytes reversed. */
static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t v = glue(ldl, MEMSUFFIX)(EA);

    return ((v & 0x000000FF) << 24) | ((v & 0x0000FF00) << 8) |
           ((v & 0x00FF0000) >> 8) | ((v & 0xFF000000) >> 24);
}
39

    
40
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
/*
 * Load a 64-bit value and return it with its eight bytes reversed.
 * Fix: the low-order byte must be shifted left by 56 (not 54) to land
 * in the most-significant byte position — this mirrors st64r below.
 * The old "<< 54" corrupted every byte-reversed doubleword load.
 */
static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);

    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
54

    
55
#if defined(TARGET_PPC64)
56
/* Load a 32-bit word and sign-extend it to 64 bits. */
static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    int32_t v = (int32_t)glue(ldl, MEMSUFFIX)(EA);

    return v;
}
60

    
61
/*
 * Load a 32-bit value, byte-swap it, and sign-extend to 64 bits.
 * Fix: the (int32_t) cast must apply to the complete swapped word.
 * Previously it bound only to the first OR term, so a swapped value
 * with bit 31 set was returned zero-extended instead of sign-extended.
 */
static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);

    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
67
#endif
68

    
69
/* Byte-swap a 16-bit value and store it. */
static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
                                                  uint16_t data)
{
    uint16_t swapped = (data >> 8) | ((data & 0x00FF) << 8);

    glue(stw, MEMSUFFIX)(EA, swapped);
}
75

    
76
/* Byte-swap a 32-bit value and store it. */
static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
                                                  uint32_t data)
{
    uint32_t swapped = ((data & 0x000000FF) << 24) |
                       ((data & 0x0000FF00) << 8) |
                       ((data & 0x00FF0000) >> 8) |
                       ((data & 0xFF000000) >> 24);

    glue(stl, MEMSUFFIX)(EA, swapped);
}
83

    
84
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
/* Byte-swap a 64-bit value and store it. */
static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    uint64_t swapped =
        ((data & 0x00000000000000FFULL) << 56) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0xFF00000000000000ULL) >> 56);

    glue(stq, MEMSUFFIX)(EA, swapped);
}
#endif
99

    
100
/***                             Integer load                              ***/
/* Emit a micro-op that loads into T1 from the 32-bit address in T0. */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}
107

    
108
#if defined(TARGET_PPC64)
/* Same as PPC_LD_OP but using the full 64-bit effective address. */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif
116

    
117
/* Emit a micro-op that stores T1 at the 32-bit address in T0. */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}
123

    
124
#if defined(TARGET_PPC64)
/* Same as PPC_ST_OP but using the full 64-bit effective address. */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
132

    
133
/* Native (big-endian) integer loads. */
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

/* Byte-reversed (little-endian mode) integer loads. */
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
160

    
161
/***                              Integer store                            ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

/* Byte-reversed (little-endian mode) integer stores. */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
181

    
182
/***                Integer load and store with byte reverse               ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

/* In little-endian mode the byte-reverse ops use the plain accessors. */
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
204

    
205
/***                    Integer load and store multiple                    ***/
206
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
207
{
208
    glue(do_lmw, MEMSUFFIX)(PARAM1);
209
    RETURN();
210
}
211

    
212
#if defined(TARGET_PPC64)
213
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
214
{
215
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
216
    RETURN();
217
}
218
#endif
219

    
220
void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
221
{
222
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
223
    RETURN();
224
}
225

    
226
#if defined(TARGET_PPC64)
227
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
228
{
229
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
230
    RETURN();
231
}
232
#endif
233

    
234
void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
235
{
236
    glue(do_stmw, MEMSUFFIX)(PARAM1);
237
    RETURN();
238
}
239

    
240
#if defined(TARGET_PPC64)
241
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
242
{
243
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
244
    RETURN();
245
}
246
#endif
247

    
248
void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
249
{
250
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
251
    RETURN();
252
}
253

    
254
#if defined(TARGET_PPC64)
255
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
256
{
257
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
258
    RETURN();
259
}
260
#endif
261

    
262
/***                    Integer load and store strings                     ***/
263
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
264
{
265
    glue(do_lsw, MEMSUFFIX)(PARAM1);
266
    RETURN();
267
}
268

    
269
#if defined(TARGET_PPC64)
270
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
271
{
272
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
273
    RETURN();
274
}
275
#endif
276

    
277
void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
278
{
279
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
280
    RETURN();
281
}
282

    
283
#if defined(TARGET_PPC64)
284
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
285
{
286
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
287
    RETURN();
288
}
289
#endif
290

    
291
/* PPC32 specification says we must generate an exception if
292
 * rA is in the range of registers to be loaded.
293
 * In an other hand, IBM says this is valid, but rA won't be loaded.
294
 * For now, I'll follow the spec...
295
 */
296
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
297
{
298
    /* Note: T1 comes from xer_bc then no cast is needed */
299
    if (likely(T1 != 0)) {
300
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
301
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
302
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
303
                                   POWERPC_EXCP_INVAL |
304
                                   POWERPC_EXCP_INVAL_LSWX);
305
        } else {
306
            glue(do_lsw, MEMSUFFIX)(PARAM1);
307
        }
308
    }
309
    RETURN();
310
}
311

    
312
#if defined(TARGET_PPC64)
313
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
314
{
315
    /* Note: T1 comes from xer_bc then no cast is needed */
316
    if (likely(T1 != 0)) {
317
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
318
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
319
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
320
                                   POWERPC_EXCP_INVAL |
321
                                   POWERPC_EXCP_INVAL_LSWX);
322
        } else {
323
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
324
        }
325
    }
326
    RETURN();
327
}
328
#endif
329

    
330
void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
331
{
332
    /* Note: T1 comes from xer_bc then no cast is needed */
333
    if (likely(T1 != 0)) {
334
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
335
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
336
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
337
                                   POWERPC_EXCP_INVAL |
338
                                   POWERPC_EXCP_INVAL_LSWX);
339
        } else {
340
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
341
        }
342
    }
343
    RETURN();
344
}
345

    
346
#if defined(TARGET_PPC64)
347
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
348
{
349
    /* Note: T1 comes from xer_bc then no cast is needed */
350
    if (likely(T1 != 0)) {
351
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
352
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
353
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
354
                                   POWERPC_EXCP_INVAL |
355
                                   POWERPC_EXCP_INVAL_LSWX);
356
        } else {
357
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
358
        }
359
    }
360
    RETURN();
361
}
362
#endif
363

    
364
/* String stores: dispatch to the do_stsw helpers. */
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
391

    
392
/***                         Floating-point store                          ***/
/* Emit a micro-op that stores FT0 at the 32-bit address in T0. */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
399

    
400
#if defined(TARGET_PPC64)
/* Same as PPC_STF_OP but using the full 64-bit effective address. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif
408

    
409
/* Convert to single precision (softfloat rounding) and store. */
static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}
413

    
414
/* stfiwx: store the low-order 32 bits of the raw FP register image,
 * without any floating-point conversion (type-pun through a union). */
static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    glue(stl, MEMSUFFIX)(EA, u.u);
}
425

    
426
/* Native floating-point stores. */
PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif
434

    
435
/* Store a double with its eight bytes reversed. */
static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0x00000000000000FFULL) << 56) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0xFF00000000000000ULL) >> 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}
453

    
454
/* Convert to single precision, then store with its bytes reversed. */
static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = float64_to_float32(d, &env->fp_status);
    /* Consistent UL suffixes; the masks and shifts match st32r. */
    u.u = ((u.u & 0x000000FFUL) << 24) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0xFF000000UL) >> 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}
468

    
469
/* stfiwx, little-endian variant: store the low-order 32 bits of the
 * raw FP register image, byte-reversed, without any conversion. */
static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0x000000FFUL) << 24) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0xFF000000UL) >> 24);
    glue(stl, MEMSUFFIX)(EA, u.u);
}
484

    
485

    
486
/* Byte-reversed (little-endian mode) floating-point stores. */
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif
494

    
495
/***                         Floating-point load                           ***/
/* Emit a micro-op that loads FT0 from the 32-bit address in T0. */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}
502

    
503
#if defined(TARGET_PPC64)
/* Same as PPC_LDF_OP but using the full 64-bit effective address. */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif
511

    
512
/* Load a single-precision float and widen it to double. */
static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}
516

    
517
/* Native floating-point loads. */
PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif
523

    
524
/* Load a double and reverse its eight bytes. */
static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0x00000000000000FFULL) << 56) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0xFF00000000000000ULL) >> 56);

    return u.d;
}
543

    
544
/* Load a single-precision float, reverse its bytes, widen to double. */
static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0x000000FFUL) << 24) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0xFF000000UL) >> 24);

    return float32_to_float64(u.f, &env->fp_status);
}
559

    
560
/* Byte-reversed (little-endian mode) floating-point loads. */
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
566

    
567
/* Load and set reservation */
568
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
569
{
570
    if (unlikely(T0 & 0x03)) {
571
        do_raise_exception(POWERPC_EXCP_ALIGN);
572
    } else {
573
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
574
        env->reserve = (uint32_t)T0;
575
    }
576
    RETURN();
577
}
578

    
579
#if defined(TARGET_PPC64)
580
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx operates on a doubleword: the EA must be 8-byte aligned, so
 * check the low three bits (0x07), not just the word mask (0x03). */
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
612
#endif
613

    
614
/* Byte-reversed lwarx: load swapped word and set the reservation. */
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (likely((T0 & 0x03) == 0)) {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    } else {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    }
    RETURN();
}
624

    
625
#if defined(TARGET_PPC64)
626
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx operates on a doubleword: the EA must be 8-byte aligned, so
 * check the low three bits (0x07), not just the word mask (0x03). */
void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
658
#endif
659

    
660
/* Store with reservation */
661
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
662
{
663
    if (unlikely(T0 & 0x03)) {
664
        do_raise_exception(POWERPC_EXCP_ALIGN);
665
    } else {
666
        if (unlikely(env->reserve != (uint32_t)T0)) {
667
            env->crf[0] = xer_so;
668
        } else {
669
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
670
            env->crf[0] = xer_so | 0x02;
671
        }
672
    }
673
    env->reserve = -1;
674
    RETURN();
675
}
676

    
677
#if defined(TARGET_PPC64)
678
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

/* stdcx operates on a doubleword: the EA must be 8-byte aligned, so
 * check the low three bits (0x07), not just the word mask (0x03). */
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
725
#endif
726

    
727
/* Byte-reversed stwcx: conditional store of the swapped word. */
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (env->reserve == (uint32_t)T0) {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        } else {
            env->crf[0] = xer_so;
        }
    }
    env->reserve = -1;
    RETURN();
}
742

    
743
#if defined(TARGET_PPC64)
744
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

/* stdcx operates on a doubleword: the EA must be 8-byte aligned, so
 * check the low three bits (0x07), not just the word mask (0x03). */
void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
791
#endif
792

    
793
/* dcbz with a 32-byte cache line: zero the line word by word,
 * using only the low 32 bits of the effective address. */
void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    int i;

    for (i = 0; i < 32; i += 4) {
        glue(stl, MEMSUFFIX)((uint32_t)(T0 + i), 0);
    }
    RETURN();
}

/* dcbz with a 64-byte cache line. */
void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    int i;

    for (i = 0; i < 64; i += 4) {
        glue(stl, MEMSUFFIX)((uint32_t)(T0 + i), 0);
    }
    RETURN();
}

/* dcbz with a 128-byte cache line. */
void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    int i;

    for (i = 0; i < 128; i += 4) {
        glue(stl, MEMSUFFIX)((uint32_t)(T0 + i), 0);
    }
    RETURN();
}

/* Generic dcbz: defer to the do_dcbz helper. */
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}
869

    
870
#if defined(TARGET_PPC64)
871
/* dcbz with a 32-byte cache line, full 64-bit effective address. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    int i;

    for (i = 0; i < 32; i += 4) {
        glue(stl, MEMSUFFIX)((uint64_t)(T0 + i), 0);
    }
    RETURN();
}

/* dcbz with a 64-byte cache line, full 64-bit effective address. */
void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    int i;

    for (i = 0; i < 64; i += 4) {
        glue(stl, MEMSUFFIX)((uint64_t)(T0 + i), 0);
    }
    RETURN();
}

/* dcbz with a 128-byte cache line, full 64-bit effective address. */
void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    int i;

    for (i = 0; i < 128; i += 4) {
        glue(stl, MEMSUFFIX)((uint64_t)(T0 + i), 0);
    }
    RETURN();
}
941

    
942
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
943
{
944
    glue(do_dcbz_64, MEMSUFFIX)();
945
    RETURN();
946
}
947
#endif
948

    
949
/* Instruction cache block invalidate */
950
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
951
{
952
    glue(do_icbi, MEMSUFFIX)();
953
    RETURN();
954
}

#if defined(TARGET_PPC64)
/* icbi, 64-bit effective address variant */
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
963

    
964
/* External access */
965
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
966
{
967
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
968
    RETURN();
969
}

#if defined(TARGET_PPC64)
/* eciwx, 64-bit effective address variant */
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif
978

    
979
void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
980
{
981
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
982
    RETURN();
983
}

#if defined(TARGET_PPC64)
/* ecowx, 64-bit effective address variant */
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
992

    
993
void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
994
{
995
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
996
    RETURN();
997
}

#if defined(TARGET_PPC64)
/* eciwx, little-endian load, 64-bit effective address */
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif
1006

    
1007
void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
1008
{
1009
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
1010
    RETURN();
1011
}

#if defined(TARGET_PPC64)
/* ecowx, little-endian store, 64-bit effective address */
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
1020

    
1021
/* XXX: those micro-ops need tests ! */
1022
/* PowerPC 601 specific instructions (POWER bridge) */
1023
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
1024
{
1025
    /* When byte count is 0, do nothing */
1026
    if (likely(T1 != 0)) {
1027
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
1028
    }
1029
    RETURN();
1030
}
1031

    
1032
/* POWER2 quad load and store */
1033
/* XXX: TAGs are not managed */
1034
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
1035
{
1036
    glue(do_POWER2_lfq, MEMSUFFIX)();
1037
    RETURN();
1038
}
1039

    
1040
void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
1041
{
1042
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
1043
    RETURN();
1044
}
1045

    
1046
void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
1047
{
1048
    glue(do_POWER2_stfq, MEMSUFFIX)();
1049
    RETURN();
1050
}
1051

    
1052
void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
1053
{
1054
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
1055
    RETURN();
1056
}

#if defined(TARGET_PPCEMB)
/* SPE extension */
/* Build an SPE load micro-op: fetches a 64-bit value into T1_64 from the
 * 32-bit effective address held in T0.
 */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* 64-bit effective address variant */
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

/* Build an SPE store micro-op: writes the 64-bit value in T1_64 to the
 * 32-bit effective address held in T0.
 */
#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* 64-bit effective address variant */
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif

#if !defined(TARGET_PPC64)
/* 64-bit doubleword load/store, big- and little-endian */
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1112
{
1113
    uint64_t ret;
1114
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
1115
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
1116
    return ret;
1117
}
1118
PPC_SPE_LD_OP(dw, spe_ldw);
1119
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
1120
                                                     uint64_t data)
1121
{
1122
    glue(stl, MEMSUFFIX)(EA, data >> 32);
1123
    glue(stl, MEMSUFFIX)(EA + 4, data);
1124
}
1125
PPC_SPE_ST_OP(dw, spe_stdw);
1126
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1127
{
1128
    uint64_t ret;
1129
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
1130
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
1131
    return ret;
1132
}
1133
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
1134
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1135
                                                        uint64_t data)
1136
{
1137
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
1138
    glue(st32r, MEMSUFFIX)(EA + 4, data);
1139
}
1140
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
1141
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1142
{
1143
    uint64_t ret;
1144
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1145
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
1146
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
1147
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
1148
    return ret;
1149
}
1150
PPC_SPE_LD_OP(dh, spe_ldh);
1151
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
1152
                                                     uint64_t data)
1153
{
1154
    glue(stw, MEMSUFFIX)(EA, data >> 48);
1155
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
1156
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
1157
    glue(stw, MEMSUFFIX)(EA + 6, data);
1158
}
1159
PPC_SPE_ST_OP(dh, spe_stdh);
1160
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1161
{
1162
    uint64_t ret;
1163
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1164
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
1165
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
1166
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
1167
    return ret;
1168
}
1169
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1170
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1171
                                                        uint64_t data)
1172
{
1173
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
1174
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1175
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1176
    glue(st16r, MEMSUFFIX)(EA + 6, data);
1177
}
1178
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1179
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1180
{
1181
    uint64_t ret;
1182
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1183
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
1184
    return ret;
1185
}
1186
PPC_SPE_LD_OP(whe, spe_lwhe);
1187
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
1188
                                                      uint64_t data)
1189
{
1190
    glue(stw, MEMSUFFIX)(EA, data >> 48);
1191
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
1192
}
1193
PPC_SPE_ST_OP(whe, spe_stwhe);
1194
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1195
{
1196
    uint64_t ret;
1197
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1198
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
1199
    return ret;
1200
}
1201
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1202
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1203
                                                         uint64_t data)
1204
{
1205
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
1206
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1207
}
1208
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1209
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1210
{
1211
    uint64_t ret;
1212
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
1213
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
1214
    return ret;
1215
}
1216
PPC_SPE_LD_OP(whou, spe_lwhou);
1217
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1218
{
1219
    uint64_t ret;
1220
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
1221
    ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
1222
    return ret;
1223
}
1224
PPC_SPE_LD_OP(whos, spe_lwhos);
1225
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
1226
                                                      uint64_t data)
1227
{
1228
    glue(stw, MEMSUFFIX)(EA, data >> 32);
1229
    glue(stw, MEMSUFFIX)(EA + 2, data);
1230
}
1231
PPC_SPE_ST_OP(who, spe_stwho);
1232
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1233
{
1234
    uint64_t ret;
1235
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
1236
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
1237
    return ret;
1238
}
1239
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1240
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1241
{
1242
    uint64_t ret;
1243
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
1244
    ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
1245
    return ret;
1246
}
1247
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1248
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1249
                                                         uint64_t data)
1250
{
1251
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
1252
    glue(st16r, MEMSUFFIX)(EA + 2, data);
1253
}
1254
PPC_SPE_ST_OP(who_le, spe_stwho_le);
1255
#if !defined(TARGET_PPC64)
1256
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
1257
                                                      uint64_t data)
1258
{
1259
    glue(stl, MEMSUFFIX)(EA, data);
1260
}
1261
PPC_SPE_ST_OP(wwo, spe_stwwo);
1262
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1263
                                                         uint64_t data)
1264
{
1265
    glue(st32r, MEMSUFFIX)(EA, data);
1266
}
1267
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1268
#endif
1269
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1270
{
1271
    uint16_t tmp;
1272
    tmp = glue(lduw, MEMSUFFIX)(EA);
1273
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1274
}
1275
PPC_SPE_LD_OP(h, spe_lh);
1276
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1277
{
1278
    uint16_t tmp;
1279
    tmp = glue(ld16r, MEMSUFFIX)(EA);
1280
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1281
}
1282
PPC_SPE_LD_OP(h_le, spe_lh_le);
1283
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1284
{
1285
    uint32_t tmp;
1286
    tmp = glue(ldl, MEMSUFFIX)(EA);
1287
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1288
}
1289
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1290
static always_inline
1291
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1292
{
1293
    uint32_t tmp;
1294
    tmp = glue(ld32r, MEMSUFFIX)(EA);
1295
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1296
}
1297
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1298
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1299
{
1300
    uint64_t ret;
1301
    uint16_t tmp;
1302
    tmp = glue(lduw, MEMSUFFIX)(EA);
1303
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1304
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
1305
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1306
    return ret;
1307
}
1308
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1309
static always_inline
1310
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1311
{
1312
    uint64_t ret;
1313
    uint16_t tmp;
1314
    tmp = glue(ld16r, MEMSUFFIX)(EA);
1315
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1316
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
1317
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1318
    return ret;
1319
}
1320
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1321
#endif /* defined(TARGET_PPCEMB) */
1322

    
1323
#undef MEMSUFFIX