Statistics
| Branch: | Revision:

root / target-ppc / op_mem.h @ 426613db

History | View | Annotate | Download (33.2 kB)

1
/*
2
 *  PowerPC emulation micro-operations for qemu.
3
 * 
4
 *  Copyright (c) 2003-2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20

    
21
/* Load 16 bits with byte reversal (little-endian access helper) */
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t v = glue(lduw, MEMSUFFIX)(EA);

    /* Swap the two bytes; the uint16_t return type discards overflow bits */
    return (v >> 8) | (v << 8);
}
26

    
27
/* Load 16 bits with byte reversal, sign-extended to 32 bits.
 * Used by the little-endian "load halfword algebraic" micro-ops.
 */
static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);

    /* Byte-swap first, then sign-extend the complete 16-bit result.
     * The previous code applied the (int16_t) cast only to the high-byte
     * term of the OR, so a swapped value with bit 15 set (e.g. 0x0080 in
     * memory) was returned as +0x8000 instead of being sign-extended.
     */
    tmp = ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
    return (int16_t)tmp;
}
32

    
33
/* Load 32 bits with byte reversal */
static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t v = glue(ldl, MEMSUFFIX)(EA);

    /* Shift-then-mask form of the 32-bit byte swap */
    return (v >> 24) | ((v >> 8) & 0x0000FF00) |
        ((v << 8) & 0x00FF0000) | (v << 24);
}
39

    
40
#if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
/* Load 64 bits with byte reversal */
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        /* was "<< 54": the low byte must move to bits 56-63, the odd
         * shift put it at bits 54-61 and corrupted the swapped value */
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
54

    
55
#if defined(TARGET_PPC64)
/* Load 32 bits, sign-extended to 64 bits */
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

/* Load 32 bits with byte reversal, sign-extended to 64 bits */
static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);

    /* Swap bytes first, then sign-extend the complete 32-bit value.
     * The previous code applied the (int32_t) cast only to the first OR
     * term, so negative swapped values were never sign-extended into the
     * 64-bit result.
     */
    tmp = ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
    return (int32_t)tmp;
}
#endif
68

    
69
/* Store 16 bits with byte reversal */
static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    glue(stw, MEMSUFFIX)(EA, (uint16_t)((data >> 8) | (data << 8)));
}
74

    
75
/* Store 32 bits with byte reversal */
static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t swapped;

    swapped = (data >> 24) | ((data >> 8) & 0x0000FF00) |
        ((data << 8) & 0x00FF0000) | (data << 24);
    glue(stl, MEMSUFFIX)(EA, swapped);
}
81

    
82
#if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
/* Store 64 bits with byte reversal */
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t swapped = 0;
    int i;

    /* Reverse the eight bytes one at a time: byte 0 of the input ends up
     * as the most-significant byte of the output, and so on. */
    for (i = 0; i < 8; i++) {
        swapped = (swapped << 8) | ((data >> (i * 8)) & 0xFFULL);
    }
    glue(stq, MEMSUFFIX)(EA, swapped);
}
#endif
96

    
97
/***                             Integer load                              ***/
/* Generate an integer load micro-op: load into T1 from the effective
 * address in T0, truncated to 32 bits for 32-bit address mode. */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_LD_OP, but the effective address is kept at 64 bits */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

/* Generate an integer store micro-op: store T1 at the effective
 * address in T0, truncated to 32 bits for 32-bit address mode. */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_ST_OP, but the effective address is kept at 64 bits */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
129

    
130
/* Native-order (big-endian) integer loads */
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

/* Little-endian-mode integer loads: use the byte-reversed accessors */
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
157

    
158
/***                              Integer store                            ***/
/* Native-order (big-endian) integer stores */
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

/* Little-endian-mode integer stores: use the byte-reversed accessors */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/***                Integer load and store with byte reverse               ***/
/* In big-endian mode the byte-reverse ops really reverse... */
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

/* ...while in little-endian mode the two reversals cancel, so the
 * byte-reverse ops use the native accessors */
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
201

    
202
/***                    Integer load and store multiple                    ***/
/* lmw/stmw micro-ops: the register-range walk is done in the do_lmw /
 * do_stmw helpers; PARAM1 is the first register of the range. */
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
258

    
259
/***                    Integer load and store strings                     ***/
/* lswi micro-ops: PARAM1 is the first target register; the byte count
 * is handled inside the do_lsw helpers. */
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
287

    
288
/* PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
/* lswx micro-ops: PARAM1 is the first target register; PARAM2/PARAM3 are
 * presumably the rA/rB register indexes used for the overlap check above
 * (NOTE(review): confirm against the translator that emits these ops). */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
352

    
353
/* stswi/stswx micro-ops: PARAM1 is the first source register; the
 * register walk is done in the do_stsw helpers. */
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
380

    
381
/***                         Floating-point store                          ***/
/* Generate a FP store micro-op: store FT0 at the effective address in T0 */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, with a full 64-bit effective address */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfl);
#endif
404

    
405
/* Store a double with byte-reversed IEEE-754 image */
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;
    uint64_t swapped = 0;
    int i;

    u.d = d;
    /* Reverse the eight bytes of the raw bit image via the union */
    for (i = 0; i < 8; i++) {
        swapped = (swapped << 8) | ((u.u >> (i * 8)) & 0xFFULL);
    }
    u.u = swapped;
    glue(stfq, MEMSUFFIX)(EA, u.d);
}
423

    
424
/* Store a float with byte-reversed IEEE-754 image */
static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = f;
    /* 32-bit byte swap of the raw bit image */
    u.u = (u.u >> 24) | ((u.u >> 8) & 0x0000FF00) |
        ((u.u << 8) & 0x00FF0000) | (u.u << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}
438

    
439
/* Little-endian FP stores use the byte-reversing helpers above */
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stflr);
#endif

/***                         Floating-point load                           ***/
/* Generate a FP load micro-op: load into FT0 from the effective address
 * in T0 */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, with a full 64-bit effective address */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfl);
#endif
469

    
470
/* Load a double whose IEEE-754 image is stored byte-reversed */
static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;
    uint64_t swapped = 0;
    int i;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    /* Reverse the eight bytes of the raw bit image via the union */
    for (i = 0; i < 8; i++) {
        swapped = (swapped << 8) | ((u.u >> (i * 8)) & 0xFFULL);
    }
    u.u = swapped;

    return u.d;
}
489

    
490
/* Load a float whose IEEE-754 image is stored byte-reversed */
static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    /* 32-bit byte swap of the raw bit image */
    u.u = (u.u >> 24) | ((u.u >> 8) & 0x0000FF00) |
        ((u.u << 8) & 0x00FF0000) | (u.u << 24);

    return u.f;
}
505

    
506
/* Little-endian FP loads use the byte-reversing helpers above */
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldflr);
#endif
512

    
513
/* Load and set reservation */
/* lwarx: word load that records the reservation address in
 * regs->reserve, checked later by the matching stwcx. micro-op.
 * A misaligned EA raises an alignment exception. */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}
524

    
525
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx: the EA must be doubleword (8-byte) aligned, so the alignment
 * check uses 0x07 — the previous word mask 0x03 let EAs such as
 * T0 % 8 == 4 through without raising the alignment exception. */
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
559

    
560
/* Little-endian lwarx: byte-reversed word load plus reservation */
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}
570

    
571
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx requires doubleword (8-byte) alignment: check 0x07, not the
 * word mask 0x03 previously used here. */
void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
605

    
606
/* Store with reservation */
607
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
608
{
609
    if (unlikely(T0 & 0x03)) {
610
        do_raise_exception(EXCP_ALIGN);
611
    } else {
612
        if (unlikely(regs->reserve != (uint32_t)T0)) {
613
            env->crf[0] = xer_ov;
614
        } else {
615
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
616
            env->crf[0] = xer_ov | 0x02;
617
        }
618
    }
619
    regs->reserve = -1;
620
    RETURN();
621
}
622

    
623
#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            /* CR0 = 0b00 || 0 || XER[SO]: the ISA copies the summary
             * overflow bit, not XER[OV] (was xer_ov) */
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

/* stdcx.: doubleword store-conditional; the EA must be 8-byte aligned
 * (0x07 mask, not the word mask 0x03 previously used). */
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif
672

    
673
/* Little-endian stwcx.: same as op_stwcx with a byte-reversed store.
 * CR0[SO] must be copied from XER[SO], not XER[OV] (was xer_ov). */
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
688

    
689
#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            /* CR0 = 0b00 || 0 || XER[SO]: copy the summary overflow
             * bit, not XER[OV] (was xer_ov) */
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

/* stdcx.: the EA must be doubleword (8-byte) aligned — 0x07 mask,
 * not the word mask 0x03 previously used. */
void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif
738

    
739
/* dcbz: zero one data cache line (32 bytes, or 64 when
 * DCACHE_LINE_SIZE == 64) starting at T0.
 * NOTE(review): T0 is presumably already masked to the cache-line
 * boundary by the translator — confirm against the caller. */
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
762

    
763
#if defined(TARGET_PPC64)
764
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
765
{
766
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
767
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
768
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
769
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
770
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
771
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
772
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
773
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
774
#if DCACHE_LINE_SIZE == 64
775
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
776
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
777
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
778
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
779
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
780
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
781
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
782
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
783
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
784
#endif
785
    RETURN();
786
}
787
#endif
788

    
789
/* Instruction cache block invalidate */
/* icbi: the actual invalidation work is done in the do_icbi helper */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
803

    
804
/* External access */
/* eciwx/ecowx micro-ops: implemented here as plain word load/store of
 * T1 at the EA in T0 (byte-reversed in the _le variants). */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
860

    
861
/* XXX: those micro-ops need tests ! */
/* PowerPC 601 specific instructions (POWER bridge) */
/* lscbx: load string and compare byte indexed; T1 holds the byte count,
 * PARAM1-PARAM3 are forwarded to the helper. */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}
871

    
872
/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}
879

    
880
void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
881
{
882
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
883
    RETURN();
884
}
885

    
886
/* POWER2 quad store, big- and little-endian variants */
void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
897

    
898
#if defined(TARGET_PPCSPE)
/* SPE extension */
/* Generate a 64-bit SPE load micro-op: load into T1_64 from the EA in T0
 * (truncated to 32 bits for 32-bit address mode). */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same, with a full 64-bit effective address */
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
/* Instantiate both the 32-bit and 64-bit address variants */
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

/* Generate a 64-bit SPE store micro-op: store T1_64 at the EA in T0 */
#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif

    
945
#if !defined(TARGET_PPC64)
946
PPC_SPE_LD_OP(dd, ldq);
947
PPC_SPE_ST_OP(dd, stq);
948
PPC_SPE_LD_OP(dd_le, ld64r);
949
PPC_SPE_ST_OP(dd_le, st64r);
950
#endif
951
static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
952
{
953
    uint64_t ret;
954
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
955
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
956
    return ret;
957
}
958
PPC_SPE_LD_OP(dw, spe_ldw);
959
static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
960
{
961
    glue(stl, MEMSUFFIX)(EA, data >> 32);
962
    glue(stl, MEMSUFFIX)(EA + 4, data);
963
}
964
PPC_SPE_ST_OP(dw, spe_stdw);
965
/* evlddx (LE form): load two byte-reversed 32-bit words, MSW first. */
static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t hi, lo;

    hi = (uint64_t)glue(ld32r, MEMSUFFIX)(EA);
    lo = (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return (hi << 32) | lo;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
/* Store a 64-bit value as two byte-reversed 32-bit words, MSW first. */
static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    uint32_t hi = data >> 32;
    uint32_t lo = data;

    glue(st32r, MEMSUFFIX)(EA, hi);
    glue(st32r, MEMSUFFIX)(EA + 4, lo);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
/* evldh: load four big-endian halfwords, most significant first. */
static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t val = 0;
    int i;

    for (i = 0; i < 4; i++) {
        val = (val << 16) | glue(lduw, MEMSUFFIX)(EA + 2 * i);
    }
    return val;
}
PPC_SPE_LD_OP(dh, spe_ldh);
/* evstdh: store four big-endian halfwords, most significant first. */
static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    int i;

    for (i = 0; i < 4; i++) {
        glue(stw, MEMSUFFIX)(EA + 2 * i, data >> (48 - 16 * i));
    }
}
PPC_SPE_ST_OP(dh, spe_stdh);
/* Load four byte-reversed halfwords, most significant first. */
static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t val = 0;
    int i;

    for (i = 0; i < 4; i++) {
        val = (val << 16) | glue(ld16r, MEMSUFFIX)(EA + 2 * i);
    }
    return val;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
/* Store four byte-reversed halfwords, most significant first. */
static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    int i;

    for (i = 0; i < 4; i++) {
        glue(st16r, MEMSUFFIX)(EA + 2 * i, data >> (48 - 16 * i));
    }
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
/* evlwhe: load two halfwords into the high half of each 32-bit element. */
static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h0, h1;

    h0 = glue(lduw, MEMSUFFIX)(EA);
    h1 = glue(lduw, MEMSUFFIX)(EA + 2);
    return (h0 << 48) | (h1 << 16);
}
PPC_SPE_LD_OP(whe, spe_lwhe);
/* evstwhe: store the high halfword of each 32-bit element. */
static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint16_t h0 = data >> 48;
    uint16_t h1 = data >> 16;

    glue(stw, MEMSUFFIX)(EA, h0);
    glue(stw, MEMSUFFIX)(EA + 2, h1);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
/* Byte-reversed evlwhe: halfwords into the high half of each element. */
static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h0, h1;

    h0 = glue(ld16r, MEMSUFFIX)(EA);
    h1 = glue(ld16r, MEMSUFFIX)(EA + 2);
    return (h0 << 48) | (h1 << 16);
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
/* Byte-reversed evstwhe: store the high halfword of each element. */
static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    uint16_t h0 = data >> 48;
    uint16_t h1 = data >> 16;

    glue(st16r, MEMSUFFIX)(EA, h0);
    glue(st16r, MEMSUFFIX)(EA + 2, h1);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
/* evlwhou: zero-extend two halfwords into the two 32-bit elements. */
static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h0, h1;

    h0 = glue(lduw, MEMSUFFIX)(EA);
    h1 = glue(lduw, MEMSUFFIX)(EA + 2);
    return (h0 << 32) | h1;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
/* evlwhos: sign-extend two halfwords into the two 32-bit elements.
 * Fix: the low element must be masked to 32 bits before merging.
 * ldsw returns a sign-extended value, so the previous
 * (uint64_t)((int32_t)...) OR set the upper 32 bits to 0xFFFFFFFF for a
 * negative low halfword, corrupting the high element; each halfword
 * sign-extends into its own 32-bit lane only. */
static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    ret |= (uint32_t)glue(ldsw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
/* evstwho: store the low halfword of each 32-bit element. */
static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint16_t lo0 = data >> 32;
    uint16_t lo1 = data;

    glue(stw, MEMSUFFIX)(EA, lo0);
    glue(stw, MEMSUFFIX)(EA + 2, lo1);
}
PPC_SPE_ST_OP(who, spe_stwho);
/* Byte-reversed evlwhou: zero-extend two halfwords into the elements. */
static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h0, h1;

    h0 = glue(ld16r, MEMSUFFIX)(EA);
    h1 = glue(ld16r, MEMSUFFIX)(EA + 2);
    return (h0 << 32) | h1;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
/* Byte-reversed evlwhos: sign-extend two reversed halfwords into the
 * two 32-bit elements.
 * Fix: ld16rs returns a sign-extended int32_t, so the previous
 * (uint64_t)((int32_t)...) OR smeared a negative low halfword's sign
 * bits into the high element; mask the low element to 32 bits. */
static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    ret |= (uint32_t)glue(ld16rs, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
/* Byte-reversed evstwho: store the low halfword of each element. */
static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    uint16_t lo0 = data >> 32;
    uint16_t lo1 = data;

    glue(st16r, MEMSUFFIX)(EA, lo0);
    glue(st16r, MEMSUFFIX)(EA + 2, lo1);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
1092
static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
1093
{
1094
    glue(stl, MEMSUFFIX)(EA, data);
1095
}
1096
PPC_SPE_ST_OP(wwo, spe_stwwo);
1097
static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1098
                                                 uint64_t data)
1099
{
1100
    glue(st32r, MEMSUFFIX)(EA, data);
1101
}
1102
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1103
#endif
1104
/* evlhhesplat: splat one halfword into the high halfword of both
 * 32-bit elements. */
static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h = glue(lduw, MEMSUFFIX)(EA);

    return (h << 48) | (h << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
/* Byte-reversed halfword splat into the high halfword of each element. */
static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h = glue(ld16r, MEMSUFFIX)(EA);

    return (h << 48) | (h << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
/* evlwwsplat: replicate one 32-bit word into both elements. */
static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t w = glue(ldl, MEMSUFFIX)(EA);

    return ((uint64_t)w << 32) | w;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
/* Byte-reversed evlwwsplat: replicate one reversed word into both
 * elements. */
static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t w = glue(ld32r, MEMSUFFIX)(EA);

    return ((uint64_t)w << 32) | w;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
/* evlwhsplat: load two halfwords, duplicating each into both halves of
 * its 32-bit element. */
static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h0 = glue(lduw, MEMSUFFIX)(EA);
    uint64_t h1 = glue(lduw, MEMSUFFIX)(EA + 2);

    return (h0 << 48) | (h0 << 32) | (h1 << 16) | h1;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
/* Byte-reversed evlwhsplat: duplicate each reversed halfword into both
 * halves of its 32-bit element. */
static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t h0 = glue(ld16r, MEMSUFFIX)(EA);
    uint64_t h1 = glue(ld16r, MEMSUFFIX)(EA + 2);

    return (h0 << 48) | (h0 << 32) | (h1 << 16) | h1;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
#endif /* defined(TARGET_PPCSPE) */
#undef MEMSUFFIX