target-ppc / op_mem.h @ d7e4b87e

/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
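
/* This header is included several times with different MEMSUFFIX settings
 * (for instance _raw, _user or _kernel, depending on the including file),
 * so each micro-operation below is generated once per address-space
 * flavour; MEMSUFFIX is #undef'd at the end of the file.
 *
 * The glue()'d helpers that follow byte-reverse 16/32/64-bit quantities and
 * back the "_le" (byte-reversed) operations defined further down.
 */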

static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}

static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif

#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif

static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif

/***                             Integer load                              ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif
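
/* Illustrative expansion: assuming MEMSUFFIX is _raw in the including file,
 * PPC_LD_OP(bz, ldub) produces
 *
 *     void OPPROTO op_lbz_raw (void)
 *     {
 *         T1 = ldub_raw((uint32_t)T0);
 *         RETURN();
 *     }
 *
 * i.e. one micro-op per access width, signedness and address-space suffix,
 * with T0 holding the effective address and T1 receiving the loaded value.
 */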

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif

PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/***                              Integer store                            ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/***                Integer load and store with byte reverse               ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
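
/* In little-endian mode the byte-reverse instructions use the plain
 * accessors (e.g. hbr_le is backed by lduw): the mode reversal and the
 * instruction's own reversal cancel each other out.
 */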

/***                    Integer load and store multiple                    ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/***                    Integer load and store strings                     ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* The PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/***                         Floating-point store                          ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(stl, MEMSUFFIX)(EA, u.u);
}

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif

static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);
    glue(stl, MEMSUFFIX)(EA, u.u);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif

/***                         Floating-point load                           ***/
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif
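
/* Single-precision values are widened to double on load (see ldfs above):
 * the emulated FPRs always hold 64-bit values.
 */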

static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);

    return float32_to_float64(u.f, &env->fp_status);
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif

/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
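
/* lwarx/ldarx record the reservation address in env->reserve; the
 * store-conditional operations below succeed only while that address still
 * matches, and the reservation is cleared in either case.
 */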

void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif
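
/* CR0 encoding for store-conditional: on success EQ is set together with a
 * copy of XER.SO (xer_so | 0x02), on failure only XER.SO is reported.
 */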

void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif

void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif
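
/* op_dcbz clears one data cache block with 32-bit zero stores: 32 bytes by
 * default, or 64 bytes when DCACHE_LINE_SIZE is 64.
 */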

/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

/* XXX: those micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When the byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPCEMB)
/* SPE extension */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
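
/* The SPE micro-ops move whole 64-bit vectors through the T1_64 temporary;
 * the helpers below pack and unpack the 32-bit and 16-bit elements around
 * the basic accessors, with "_le" variants using the byte-reversed helpers
 * defined at the top of this file.
 */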

#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
#endif /* defined(TARGET_PPCEMB) */

#undef MEMSUFFIX