Statistics
| Branch: | Revision:

root / target-ppc / op_mem.h @ f8ed7070

History | View | Annotate | Download (36.4 kB)

1
/*
2
 *  PowerPC emulation micro-operations for qemu.
3
 *
4
 *  Copyright (c) 2003-2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20

    
21
#include "op_mem_access.h"
22

    
23
/***                             Integer load                              ***/
/* Generate an integer-load micro-op: loads into T1 from the effective
 * address in T0, truncated to 32 bits, via the given access helper for
 * the current MEMSUFFIX. */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}
30

    
31
#if defined(TARGET_PPC64)
/* 64-bit variant of PPC_LD_OP: the effective address in T0 is used with
 * its full 64-bit width instead of being truncated. */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif
39

    
40
/* Generate an integer-store micro-op: stores T1 to the effective address
 * in T0, truncated to 32 bits, via the given access helper. */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}
46

    
47
#if defined(TARGET_PPC64)
/* 64-bit variant of PPC_ST_OP: uses the full 64-bit effective address. */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
55

    
56
/* Big-endian (native order) integer loads */
PPC_LD_OP(bz, ldu8);
PPC_LD_OP(ha, lds16);
PPC_LD_OP(hz, ldu16);
PPC_LD_OP(wz, ldu32);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa, lds32);
PPC_LD_OP(d, ldu64);
PPC_LD_OP_64(bz, ldu8);
PPC_LD_OP_64(ha, lds16);
PPC_LD_OP_64(hz, ldu16);
PPC_LD_OP_64(wz, ldu32);
PPC_LD_OP_64(wa, lds32);
PPC_LD_OP_64(d, ldu64);
#endif

/* Little-endian variants: use the byte-reversing access helpers */
PPC_LD_OP(ha_le, lds16r);
PPC_LD_OP(hz_le, ldu16r);
PPC_LD_OP(wz_le, ldu32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa_le, lds32r);
PPC_LD_OP(d_le, ldu64r);
PPC_LD_OP_64(ha_le, lds16r);
PPC_LD_OP_64(hz_le, ldu16r);
PPC_LD_OP_64(wz_le, ldu32r);
PPC_LD_OP_64(wa_le, lds32r);
PPC_LD_OP_64(d_le, ldu64r);
#endif
83

    
84
/***                              Integer store                            ***/
/* Big-endian (native order) integer stores */
PPC_ST_OP(b, st8);
PPC_ST_OP(h, st16);
PPC_ST_OP(w, st32);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, st64);
PPC_ST_OP_64(b, st8);
PPC_ST_OP_64(h, st16);
PPC_ST_OP_64(w, st32);
PPC_ST_OP_64(d, st64);
#endif

/* Little-endian variants: use the byte-reversing access helpers */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
PPC_ST_OP_64(d_le, st64r);
#endif
104

    
105
/***                Integer load and store with byte reverse               ***/
/* In native (big-endian) mode the byte-reverse ops use the reversing
 * helpers; in little-endian mode the reversal cancels out, so the plain
 * helpers are used instead. */
PPC_LD_OP(hbr, ldu16r);
PPC_LD_OP(wbr, ldu32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ldu16r);
PPC_LD_OP_64(wbr, ldu32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, ldu16);
PPC_LD_OP(wbr_le, ldu32);
PPC_ST_OP(hbr_le, st16);
PPC_ST_OP(wbr_le, st32);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, ldu16);
PPC_LD_OP_64(wbr_le, ldu32);
PPC_ST_OP_64(hbr_le, st16);
PPC_ST_OP_64(wbr_le, st32);
#endif
127

    
128
/***                    Integer load and store multiple                    ***/
129
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
130
{
131
    glue(do_lmw, MEMSUFFIX)(PARAM1);
132
    RETURN();
133
}
134

    
135
#if defined(TARGET_PPC64)
136
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
137
{
138
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
139
    RETURN();
140
}
141
#endif
142

    
143
void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
144
{
145
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
146
    RETURN();
147
}
148

    
149
#if defined(TARGET_PPC64)
150
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
151
{
152
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
153
    RETURN();
154
}
155
#endif
156

    
157
void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
158
{
159
    glue(do_stmw, MEMSUFFIX)(PARAM1);
160
    RETURN();
161
}
162

    
163
#if defined(TARGET_PPC64)
164
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
165
{
166
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
167
    RETURN();
168
}
169
#endif
170

    
171
void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
172
{
173
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
174
    RETURN();
175
}
176

    
177
#if defined(TARGET_PPC64)
178
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
179
{
180
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
181
    RETURN();
182
}
183
#endif
184

    
185
/***                    Integer load and store strings                     ***/
186
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
187
{
188
    glue(do_lsw, MEMSUFFIX)(PARAM1);
189
    RETURN();
190
}
191

    
192
#if defined(TARGET_PPC64)
193
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
194
{
195
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
196
    RETURN();
197
}
198
#endif
199

    
200
/* PPC32 specification says we must generate an exception if
201
 * rA is in the range of registers to be loaded.
202
 * In an other hand, IBM says this is valid, but rA won't be loaded.
203
 * For now, I'll follow the spec...
204
 */
205
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
206
{
207
    /* Note: T1 comes from xer_bc then no cast is needed */
208
    if (likely(T1 != 0)) {
209
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
210
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
211
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
212
                                   POWERPC_EXCP_INVAL |
213
                                   POWERPC_EXCP_INVAL_LSWX);
214
        } else {
215
            glue(do_lsw, MEMSUFFIX)(PARAM1);
216
        }
217
    }
218
    RETURN();
219
}
220

    
221
#if defined(TARGET_PPC64)
222
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
223
{
224
    /* Note: T1 comes from xer_bc then no cast is needed */
225
    if (likely(T1 != 0)) {
226
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
227
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
228
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
229
                                   POWERPC_EXCP_INVAL |
230
                                   POWERPC_EXCP_INVAL_LSWX);
231
        } else {
232
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
233
        }
234
    }
235
    RETURN();
236
}
237
#endif
238

    
239
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
240
{
241
    glue(do_stsw, MEMSUFFIX)(PARAM1);
242
    RETURN();
243
}
244

    
245
#if defined(TARGET_PPC64)
246
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
247
{
248
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
249
    RETURN();
250
}
251
#endif
252

    
253
/***                         Floating-point store                          ***/
/* Generate a floating-point store micro-op: stores FT0 to the effective
 * address in T0 (truncated to 32 bits) via the given access helper. */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* 64-bit variant: uses the full 64-bit effective address. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif
269

    
270
/* Store a float64 as single precision: convert with the current FP
 * status and store 32 bits. */
static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, float64 d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

/* stfiwx semantics: store the low-order 32 bits of the raw float64
 * image without any conversion. */
static always_inline void glue(stfiw, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(st32, MEMSUFFIX)(EA, u.l.lower);
}
283

    
284
/* Native-order floating-point stores */
PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiw, stfiw);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiw, stfiw);
#endif
292

    
293
/* Byte-reversed double store: swap the 64-bit image, then store. */
static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    u.d = d;
    u.ll = bswap64(u.ll);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

/* Byte-reversed single store: convert to float32, swap the 32-bit
 * image, then store. */
static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_FloatU u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.l = bswap32(u.l);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

/* Byte-reversed stfiwx: swap and store the low-order 32 bits of the raw
 * float64 image without conversion. */
static always_inline void glue(stfiwr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.l.lower = bswap32(u.l.lower);
    glue(st32, MEMSUFFIX)(EA, u.l.lower);
}
320

    
321
/* Little-endian floating-point stores: byte-reversing helpers */
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiw_le, stfiwr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiw_le, stfiwr);
#endif
329

    
330
/***                         Floating-point load                           ***/
/* Generate a floating-point load micro-op: loads FT0 from the effective
 * address in T0 (truncated to 32 bits) via the given access helper. */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* 64-bit variant: uses the full 64-bit effective address. */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif
346

    
347
/* Load a single-precision value and widen it to float64 with the
 * current FP status. */
static always_inline float64 glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}
351

    
352
/* Native-order floating-point loads */
PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif
358

    
359
/* Byte-reversed double load: load 64 bits, swap the raw image. */
static always_inline float64 glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    CPU_DoubleU u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.ll = bswap64(u.ll);

    return u.d;
}

/* Byte-reversed single load: load 32 bits, swap the raw image, then
 * widen to float64. */
static always_inline float64 glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    CPU_FloatU u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.l = bswap32(u.l);

    return float32_to_float64(u.f, &env->fp_status);
}
378

    
379
/* Little-endian floating-point loads: byte-reversing helpers */
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
385

    
386
/* Load and set reservation */
387
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
388
{
389
    if (unlikely(T0 & 0x03)) {
390
        do_raise_exception(POWERPC_EXCP_ALIGN);
391
    } else {
392
        T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
393
        env->reserve = (uint32_t)T0;
394
    }
395
    RETURN();
396
}
397

    
398
#if defined(TARGET_PPC64)
399
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
400
{
401
    if (unlikely(T0 & 0x03)) {
402
        do_raise_exception(POWERPC_EXCP_ALIGN);
403
    } else {
404
        T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
405
        env->reserve = (uint64_t)T0;
406
    }
407
    RETURN();
408
}
409

    
410
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
411
{
412
    if (unlikely(T0 & 0x03)) {
413
        do_raise_exception(POWERPC_EXCP_ALIGN);
414
    } else {
415
        T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
416
        env->reserve = (uint32_t)T0;
417
    }
418
    RETURN();
419
}
420

    
421
void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
422
{
423
    if (unlikely(T0 & 0x03)) {
424
        do_raise_exception(POWERPC_EXCP_ALIGN);
425
    } else {
426
        T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
427
        env->reserve = (uint64_t)T0;
428
    }
429
    RETURN();
430
}
431
#endif
432

    
433
/* Little-endian variants of the reservation loads: identical to the
 * native-order ops except for the byte-reversing access helpers. */
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
478

    
479
/* Store with reservation */
480
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
481
{
482
    if (unlikely(T0 & 0x03)) {
483
        do_raise_exception(POWERPC_EXCP_ALIGN);
484
    } else {
485
        if (unlikely(env->reserve != (uint32_t)T0)) {
486
            env->crf[0] = xer_so;
487
        } else {
488
            glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
489
            env->crf[0] = xer_so | 0x02;
490
        }
491
    }
492
    env->reserve = (target_ulong)-1ULL;
493
    RETURN();
494
}
495

    
496
#if defined(TARGET_PPC64)
497
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
498
{
499
    if (unlikely(T0 & 0x03)) {
500
        do_raise_exception(POWERPC_EXCP_ALIGN);
501
    } else {
502
        if (unlikely(env->reserve != (uint64_t)T0)) {
503
            env->crf[0] = xer_so;
504
        } else {
505
            glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
506
            env->crf[0] = xer_so | 0x02;
507
        }
508
    }
509
    env->reserve = (target_ulong)-1ULL;
510
    RETURN();
511
}
512

    
513
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
514
{
515
    if (unlikely(T0 & 0x03)) {
516
        do_raise_exception(POWERPC_EXCP_ALIGN);
517
    } else {
518
        if (unlikely(env->reserve != (uint32_t)T0)) {
519
            env->crf[0] = xer_so;
520
        } else {
521
            glue(st64, MEMSUFFIX)((uint32_t)T0, T1);
522
            env->crf[0] = xer_so | 0x02;
523
        }
524
    }
525
    env->reserve = (target_ulong)-1ULL;
526
    RETURN();
527
}
528

    
529
void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
530
{
531
    if (unlikely(T0 & 0x03)) {
532
        do_raise_exception(POWERPC_EXCP_ALIGN);
533
    } else {
534
        if (unlikely(env->reserve != (uint64_t)T0)) {
535
            env->crf[0] = xer_so;
536
        } else {
537
            glue(st64, MEMSUFFIX)((uint64_t)T0, T1);
538
            env->crf[0] = xer_so | 0x02;
539
        }
540
    }
541
    env->reserve = (target_ulong)-1ULL;
542
    RETURN();
543
}
544
#endif
545

    
546
/* Little-endian variants of the conditional stores: identical to the
 * native-order ops except for the byte-reversing store helpers. */
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif
611

    
612
/* dcbz with a 32-byte cache line: align T0 down to the line boundary,
 * then zero the line with unrolled 32-bit stores. */
void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)31);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    RETURN();
}

/* Same as above for a 64-byte cache line. */
void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)63);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    RETURN();
}

/* Same as above for a 128-byte cache line. */
void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)127);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
    RETURN();
}

/* Generic dcbz: line size resolved at runtime by the helper. */
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}
691

    
692
#if defined(TARGET_PPC64)
/* 64-bit EA variants of the dcbz micro-ops above. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)31);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)63);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)127);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif
773

    
774
/* Instruction cache block invalidate */
775
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
776
{
777
    glue(do_icbi, MEMSUFFIX)();
778
    RETURN();
779
}
780

    
781
#if defined(TARGET_PPC64)
782
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
783
{
784
    glue(do_icbi_64, MEMSUFFIX)();
785
    RETURN();
786
}
787
#endif
788

    
789
/* External access */
790
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
791
{
792
    T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
793
    RETURN();
794
}
795

    
796
#if defined(TARGET_PPC64)
797
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
798
{
799
    T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
800
    RETURN();
801
}
802
#endif
803

    
804
void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
805
{
806
    glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
807
    RETURN();
808
}
809

    
810
#if defined(TARGET_PPC64)
811
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
812
{
813
    glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
814
    RETURN();
815
}
816
#endif
817

    
818
void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
819
{
820
    T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
821
    RETURN();
822
}
823

    
824
#if defined(TARGET_PPC64)
825
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
826
{
827
    T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
828
    RETURN();
829
}
830
#endif
831

    
832
void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
833
{
834
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
835
    RETURN();
836
}
837

    
838
#if defined(TARGET_PPC64)
839
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
840
{
841
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
842
    RETURN();
843
}
844
#endif
845

    
846
/* XXX: those micro-ops need tests ! */
847
/* PowerPC 601 specific instructions (POWER bridge) */
848
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
849
{
850
    /* When byte count is 0, do nothing */
851
    if (likely(T1 != 0)) {
852
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
853
    }
854
    RETURN();
855
}
856

    
857
/* POWER2 quad load and store */
858
/* XXX: TAGs are not managed */
859
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
860
{
861
    glue(do_POWER2_lfq, MEMSUFFIX)();
862
    RETURN();
863
}
864

    
865
void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
866
{
867
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
868
    RETURN();
869
}
870

    
871
void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
872
{
873
    glue(do_POWER2_stfq, MEMSUFFIX)();
874
    RETURN();
875
}
876

    
877
void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
878
{
879
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
880
    RETURN();
881
}
882

    
883
/* Altivec vector extension */
884
#if defined(WORDS_BIGENDIAN)
885
#define VR_DWORD0 0
886
#define VR_DWORD1 1
887
#else
888
#define VR_DWORD0 1
889
#define VR_DWORD1 0
890
#endif
891
void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
892
{
893
    AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
894
    AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint32_t)T0 + 8);
895
}
896

    
897
void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
898
{
899
    AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
900
    AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0 + 8);
901
}
902

    
903
void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
904
{
905
    glue(st64, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
906
    glue(st64, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
907
}
908

    
909
void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
910
{
911
    glue(st64r, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
912
    glue(st64r, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
913
}
914

    
915
#if defined(TARGET_PPC64)
916
void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
917
{
918
    AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
919
    AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint64_t)T0 + 8);
920
}
921

    
922
void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
923
{
924
    AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
925
    AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0 + 8);
926
}
927

    
928
void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
929
{
930
    glue(st64, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
931
    glue(st64, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
932
}
933

    
934
void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
935
{
936
    glue(st64r, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
937
    glue(st64r, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
938
}
939
#endif
940
#undef VR_DWORD0
941
#undef VR_DWORD1
942

    
943
/* SPE extension */
/* Generate an SPE load micro-op: loads the 64-bit T1_64 from the
 * effective address in T0 (truncated to 32 bits). */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* 64-bit EA variant. */
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
/* On PPC64, instantiate both the 32-bit and the 64-bit EA forms. */
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif
965

    
966
/* Generate an SPE store op: store the 64-bit value in T1_64 to the
 * 32-bit effective address in T0, using accessor 'op'. */
#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same generator, but using T0 as a full 64-bit effective address. */
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
/* 64-bit targets get both the 32-bit and the 64-bit EA flavors. */
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
/* Whole-doubleword accesses (evldd/evstdd): the generic 64-bit accessors
 * can be used directly.
 * NOTE(review): only generated for 32-bit targets here; 64-bit targets
 * presumably define these elsewhere -- confirm against the rest of the
 * file. */
#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldu64);
PPC_SPE_ST_OP(dd, st64);
PPC_SPE_LD_OP(dd_le, ldu64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
/* evldw: load two 32-bit words into the high and low halves of the
 * 64-bit result. */
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu32, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu32, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
1003
                                                     uint64_t data)
1004
{
1005
    glue(st32, MEMSUFFIX)(EA, data >> 32);
1006
    glue(st32, MEMSUFFIX)(EA + 4, data);
1007
}
1008
PPC_SPE_ST_OP(dw, spe_stdw);
1009
/* evldw, byte-reversed (little-endian) variant of spe_ldw. */
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1018
                                                        uint64_t data)
1019
{
1020
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
1021
    glue(st32r, MEMSUFFIX)(EA + 4, data);
1022
}
1023
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
1024
/* evldh: load four 16-bit halfwords, packed most-significant first. */
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
1035
                                                     uint64_t data)
1036
{
1037
    glue(st16, MEMSUFFIX)(EA, data >> 48);
1038
    glue(st16, MEMSUFFIX)(EA + 2, data >> 32);
1039
    glue(st16, MEMSUFFIX)(EA + 4, data >> 16);
1040
    glue(st16, MEMSUFFIX)(EA + 6, data);
1041
}
1042
PPC_SPE_ST_OP(dh, spe_stdh);
1043
/* evldh, byte-reversed (little-endian) variant of spe_ldh. */
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1054
                                                        uint64_t data)
1055
{
1056
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
1057
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1058
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1059
    glue(st16r, MEMSUFFIX)(EA + 6, data);
1060
}
1061
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1062
/* evlwhe: load two halfwords into the even (most-significant) halfword
 * of each 32-bit element; the odd halfwords are left zero. */
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
1071
                                                      uint64_t data)
1072
{
1073
    glue(st16, MEMSUFFIX)(EA, data >> 48);
1074
    glue(st16, MEMSUFFIX)(EA + 2, data >> 16);
1075
}
1076
PPC_SPE_ST_OP(whe, spe_stwhe);
1077
/* evlwhe, byte-reversed (little-endian) variant of spe_lwhe. */
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1086
                                                         uint64_t data)
1087
{
1088
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
1089
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1090
}
1091
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1092
/* evlwhou: load two halfwords, zero-extended into each 32-bit element. */
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
/* evlwhos: load two halfwords, sign-extended into each 32-bit element.
 * The low element must be masked to 32 bits before OR-ing: a negative
 * second halfword would otherwise sign-extend through bits 63:32 and
 * corrupt the high element. */
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((uint32_t)((int32_t)glue(lds16, MEMSUFFIX)(EA + 2)));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
1109
                                                      uint64_t data)
1110
{
1111
    glue(st16, MEMSUFFIX)(EA, data >> 32);
1112
    glue(st16, MEMSUFFIX)(EA + 2, data);
1113
}
1114
PPC_SPE_ST_OP(who, spe_stwho);
1115
/* evlwhou, byte-reversed (little-endian) variant of spe_lwhou. */
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
/* evlwhos, byte-reversed (little-endian) variant of spe_lwhos.
 * As in spe_lwhos, the low element is masked to 32 bits so that a
 * negative second halfword cannot corrupt the high element. */
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((uint32_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA + 2)));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1132
                                                         uint64_t data)
1133
{
1134
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
1135
    glue(st16r, MEMSUFFIX)(EA + 2, data);
1136
}
1137
PPC_SPE_ST_OP(who_le, spe_stwho_le);
1138
#if !defined(TARGET_PPC64)
1139
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
1140
                                                      uint64_t data)
1141
{
1142
    glue(st32, MEMSUFFIX)(EA, data);
1143
}
1144
PPC_SPE_ST_OP(wwo, spe_stwwo);
1145
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1146
                                                         uint64_t data)
1147
{
1148
    glue(st32r, MEMSUFFIX)(EA, data);
1149
}
1150
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1151
#endif
1152
/* evlhhesplat: load one halfword and splat it into the even
 * (most-significant) halfword of both 32-bit elements. */
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ldu16, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
/* evlhhesplat, byte-reversed (little-endian) variant of spe_lh. */
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ldu16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
/* evlwwsplat: load one word and splat it into both 32-bit elements. */
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldu32, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static always_inline
1174
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1175
{
1176
    uint32_t tmp;
1177
    tmp = glue(ldu32r, MEMSUFFIX)(EA);
1178
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1179
}
1180
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1181
/* evlwhsplat: load two halfwords and splat each into both halfwords of
 * its 32-bit element. */
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ldu16, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ldu16, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static always_inline
1193
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1194
{
1195
    uint64_t ret;
1196
    uint16_t tmp;
1197
    tmp = glue(ldu16r, MEMSUFFIX)(EA);
1198
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1199
    tmp = glue(ldu16r, MEMSUFFIX)(EA + 2);
1200
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1201
    return ret;
1202
}
1203
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);

/* End of one MEMSUFFIX instantiation of this access template. */
#undef MEMSUFFIX