target-ppc/op_mem.h @ e1833e1f
/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

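/* Byte-swapped load/store helpers: each one reverses the byte order of the
 * underlying access. They back the little-endian (*_le) and byte-reverse
 * (hbr/wbr) micro-ops defined further down in this file.
 */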
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Swap the two bytes, then sign-extend the 16-bit result */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}

static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif

#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Swap the four bytes, then sign-extend the 32-bit result */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif

static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif

/***                             Integer load                              ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif

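/* Each expansion below defines a micro-op op_l<name> or op_st<name> for the
 * current MEMSUFFIX; T0 holds the effective address and T1 the value.
 * The _64 variants use the full 64-bit address instead of truncating T0
 * to 32 bits.
 */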
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/***                              Integer store                            ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/***                Integer load and store with byte reverse               ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif

/***                    Integer load and store multiple                    ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/***                    Integer load and store strings                     ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* The PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/***                         Floating-point store                          ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfl);
#endif

static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = f;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stflr);
#endif

/***                         Floating-point load                           ***/
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfl);
#endif

static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000ULL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFULL) << 24);

    return u.f;
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldflr);
#endif

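/* Reservation handling: l{w,d}arx record the reserved address in
 * env->reserve. st{w,d}cx. only perform the store if the reservation still
 * matches, setting CR0 to XER[SO] | 0x02 (EQ) on success; the reservation
 * is cleared in every case.
 */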
/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif

void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif

void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif

/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

/* XXX: those micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}

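/* The SPE micro-ops move a full 64-bit GPR through T1_64; the helpers below
 * assemble or split that 64-bit value from 32-bit and 16-bit memory
 * accesses, with the *_le variants going through the byte-swapped helpers.
 */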
#if defined(TARGET_PPCEMB)
/* SPE extension */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif


#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif

#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
#endif /* defined(TARGET_PPCEMB) */

#undef MEMSUFFIX