Revision d9bce9d9 target-ppc/op_mem.h

--- a/target-ppc/op_mem.h
+++ b/target-ppc/op_mem.h
@@ -37 +37 @@
         ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
 }
 
+#if defined(TARGET_PPC64)
+static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
+{
+    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
+}
+
+static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
+{
+    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
+    return ((tmp & 0xFF00000000000000ULL) >> 56) |
+        ((tmp & 0x00FF000000000000ULL) >> 40) |
+        ((tmp & 0x0000FF0000000000ULL) >> 24) |
+        ((tmp & 0x000000FF00000000ULL) >> 8) |
+        ((tmp & 0x00000000FF000000ULL) << 8) |
+        ((tmp & 0x0000000000FF0000ULL) << 24) |
+        ((tmp & 0x000000000000FF00ULL) << 40) |
+        ((tmp & 0x00000000000000FFULL) << 56);
+}
+
+static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
+{
+    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
+    return (int32_t)((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
+        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
+}
+#endif
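
The 64-bit helpers added above are plain mask-and-shift byte swaps. A standalone sketch of the same pattern, handy for sanity-checking the shift amounts on a host build (bswap64 and check_bswap64 are illustrative names, not part of this file):

#include <assert.h>
#include <stdint.h>

static uint64_t bswap64(uint64_t v)
{
    return ((v & 0xFF00000000000000ULL) >> 56) |
           ((v & 0x00FF000000000000ULL) >> 40) |
           ((v & 0x0000FF0000000000ULL) >> 24) |
           ((v & 0x000000FF00000000ULL) >> 8) |
           ((v & 0x00000000FF000000ULL) << 8) |
           ((v & 0x0000000000FF0000ULL) << 24) |
           ((v & 0x000000000000FF00ULL) << 40) |
           ((v & 0x00000000000000FFULL) << 56);
}

static void check_bswap64(void)
{
    /* a byte swap is its own inverse */
    assert(bswap64(0x0123456789ABCDEFULL) == 0xEFCDAB8967452301ULL);
    assert(bswap64(bswap64(0x0123456789ABCDEFULL)) == 0x0123456789ABCDEFULL);
}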
+
 static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
 {
     uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
@@ -50 +77 @@
     glue(stl, MEMSUFFIX)(EA, tmp);
 }
 
+#if defined(TARGET_PPC64)
+static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
+{
+    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
+        ((data & 0x00FF000000000000ULL) >> 40) |
+        ((data & 0x0000FF0000000000ULL) >> 24) |
+        ((data & 0x000000FF00000000ULL) >> 8) |
+        ((data & 0x00000000FF000000ULL) << 8) |
+        ((data & 0x0000000000FF0000ULL) << 24) |
+        ((data & 0x000000000000FF00ULL) << 40) |
+        ((data & 0x00000000000000FFULL) << 56);
+    glue(stq, MEMSUFFIX)(EA, tmp);
+}
+#endif
+
 /***                             Integer load                              ***/
 #define PPC_LD_OP(name, op)                                                   \
-PPC_OP(glue(glue(l, name), MEMSUFFIX))                                        \
+void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
 {                                                                             \
-    T1 = glue(op, MEMSUFFIX)(T0);                                             \
+    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
     RETURN();                                                                 \
 }
 
+#if defined(TARGET_PPC64)
+#define PPC_LD_OP_64(name, op)                                                \
+void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
+{                                                                             \
+    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
+    RETURN();                                                                 \
+}
+#endif
+
 #define PPC_ST_OP(name, op)                                                   \
-PPC_OP(glue(glue(st, name), MEMSUFFIX))                                       \
+void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
 {                                                                             \
-    glue(op, MEMSUFFIX)(T0, T1);                                              \
+    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
     RETURN();                                                                 \
 }
 
+#if defined(TARGET_PPC64)
+#define PPC_ST_OP_64(name, op)                                                \
+void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
+{                                                                             \
+    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
+    RETURN();                                                                 \
+}
+#endif
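
For reference, a concrete instantiation of the new macro pair, assuming MEMSUFFIX is defined as _raw when this header is included (illustrative expansion only; OPPROTO, T0, T1 and RETURN come from the surrounding op.c machinery):

/* PPC_LD_OP(wz, ldl) expands roughly to: */
void OPPROTO op_lwz_raw (void)
{
    T1 = ldl_raw((uint32_t)T0);
    RETURN();
}

/* and PPC_LD_OP_64(wz, ldl) to: */
void OPPROTO op_lwz_64_raw (void)
{
    T1 = ldl_raw((uint64_t)T0);
    RETURN();
}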
+
 PPC_LD_OP(bz, ldub);
 PPC_LD_OP(ha, ldsw);
 PPC_LD_OP(hz, lduw);
 PPC_LD_OP(wz, ldl);
+#if defined(TARGET_PPC64)
+PPC_LD_OP(d, ldq);
+PPC_LD_OP(wa, ldsl);
+PPC_LD_OP_64(d, ldq);
+PPC_LD_OP_64(wa, ldsl);
+PPC_LD_OP_64(bz, ldub);
+PPC_LD_OP_64(ha, ldsw);
+PPC_LD_OP_64(hz, lduw);
+PPC_LD_OP_64(wz, ldl);
+#endif
 
 PPC_LD_OP(ha_le, ld16rs);
 PPC_LD_OP(hz_le, ld16r);
 PPC_LD_OP(wz_le, ld32r);
+#if defined(TARGET_PPC64)
+PPC_LD_OP(d_le, ld64r);
+PPC_LD_OP(wa_le, ld32rs);
+PPC_LD_OP_64(d_le, ld64r);
+PPC_LD_OP_64(wa_le, ld32rs);
+PPC_LD_OP_64(ha_le, ld16rs);
+PPC_LD_OP_64(hz_le, ld16r);
+PPC_LD_OP_64(wz_le, ld32r);
+#endif
 
 /***                              Integer store                            ***/
 PPC_ST_OP(b, stb);
 PPC_ST_OP(h, stw);
 PPC_ST_OP(w, stl);
+#if defined(TARGET_PPC64)
+PPC_ST_OP(d, stq);
+PPC_ST_OP_64(d, stq);
+PPC_ST_OP_64(b, stb);
+PPC_ST_OP_64(h, stw);
+PPC_ST_OP_64(w, stl);
+#endif
 
 PPC_ST_OP(h_le, st16r);
 PPC_ST_OP(w_le, st32r);
+#if defined(TARGET_PPC64)
+PPC_ST_OP(d_le, st64r);
+PPC_ST_OP_64(d_le, st64r);
+PPC_ST_OP_64(h_le, st16r);
+PPC_ST_OP_64(w_le, st32r);
+#endif
 
 /***                Integer load and store with byte reverse               ***/
 PPC_LD_OP(hbr, ld16r);
 PPC_LD_OP(wbr, ld32r);
 PPC_ST_OP(hbr, st16r);
 PPC_ST_OP(wbr, st32r);
+#if defined(TARGET_PPC64)
+PPC_LD_OP_64(hbr, ld16r);
+PPC_LD_OP_64(wbr, ld32r);
+PPC_ST_OP_64(hbr, st16r);
+PPC_ST_OP_64(wbr, st32r);
+#endif
 
 PPC_LD_OP(hbr_le, lduw);
 PPC_LD_OP(wbr_le, ldl);
 PPC_ST_OP(hbr_le, stw);
 PPC_ST_OP(wbr_le, stl);
+#if defined(TARGET_PPC64)
+PPC_LD_OP_64(hbr_le, lduw);
+PPC_LD_OP_64(wbr_le, ldl);
+PPC_ST_OP_64(hbr_le, stw);
+PPC_ST_OP_64(wbr_le, stl);
+#endif
 
 /***                    Integer load and store multiple                    ***/
-PPC_OP(glue(lmw, MEMSUFFIX))
+void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
 {
     glue(do_lmw, MEMSUFFIX)(PARAM1);
     RETURN();
 }
 
-PPC_OP(glue(lmw_le, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
+{
+    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
 {
     glue(do_lmw_le, MEMSUFFIX)(PARAM1);
     RETURN();
 }
 
-PPC_OP(glue(stmw, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
+{
+    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
 {
     glue(do_stmw, MEMSUFFIX)(PARAM1);
     RETURN();
 }
 
-PPC_OP(glue(stmw_le, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
+{
+    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
 {
     glue(do_stmw_le, MEMSUFFIX)(PARAM1);
     RETURN();
 }
 
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
+{
+    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+#endif
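
The do_lmw*/do_stmw* helpers called here are defined elsewhere; conceptually, load-multiple fills GPRs rN through r31 with consecutive words starting at the effective address. A rough sketch of that behaviour (gpr, load32 and lmw_sketch are placeholders, not QEMU names; PARAM1 in the ops above selects the first register):

#include <stdint.h>

static void lmw_sketch(uint32_t *gpr, int first_reg,
                       uint32_t (*load32)(uint64_t ea), uint64_t ea)
{
    for (int r = first_reg; r < 32; r++, ea += 4) {
        gpr[r] = load32(ea);    /* each register gets the next word */
    }
}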
+
 /***                    Integer load and store strings                     ***/
-PPC_OP(glue(lswi, MEMSUFFIX))
+void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
+{
+    glue(do_lsw, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
 {
-    glue(do_lsw, MEMSUFFIX)(PARAM(1));
+    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
     RETURN();
 }
+#endif
 
-PPC_OP(glue(lswi_le, MEMSUFFIX))
+void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
 {
-    glue(do_lsw_le, MEMSUFFIX)(PARAM(1));
+    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
     RETURN();
 }
 
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
+{
+    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+#endif
+
 /* PPC32 specification says we must generate an exception if
  * rA is in the range of registers to be loaded.
  * On the other hand, IBM says this is valid, but rA won't be loaded.
  * For now, I'll follow the spec...
  */
-PPC_OP(glue(lswx, MEMSUFFIX))
+void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
+{
+    /* Note: T1 comes from xer_bc then no cast is needed */
+    if (likely(T1 != 0)) {
+        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
+                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
+            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
+        } else {
+            glue(do_lsw, MEMSUFFIX)(PARAM1);
+        }
+    }
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
+{
+    /* Note: T1 comes from xer_bc then no cast is needed */
+    if (likely(T1 != 0)) {
+        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
+                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
+            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
+        } else {
+            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
+        }
+    }
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
 {
-    if (unlikely(T1 > 0)) {
+    /* Note: T1 comes from xer_bc then no cast is needed */
+    if (likely(T1 != 0)) {
         if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                      (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
             do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
         } else {
-            glue(do_lsw, MEMSUFFIX)(PARAM(1));
+            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
         }
     }
     RETURN();
 }
 
-PPC_OP(glue(lswx_le, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
 {
-    if (unlikely(T1 > 0)) {
+    /* Note: T1 comes from xer_bc then no cast is needed */
+    if (likely(T1 != 0)) {
         if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                      (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
             do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
         } else {
-            glue(do_lsw_le, MEMSUFFIX)(PARAM(1));
+            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
         }
     }
     RETURN();
 }
+#endif
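
The guard added to the lswx ops rejects a load-string whose destination register range would overlap rA or rB, as the comment above explains. In isolation the test reads as follows (a sketch: start, len and reg stand in for PARAM1, T1 (xer_bc) and PARAM2/PARAM3):

static int lswx_overlaps(int start, int len, int reg)
{
    /* does the half-open range [start, start + len) cover reg? */
    return start < reg && (start + len) > reg;
}

/* op_lswx raises EXCP_PROGRAM with EXCP_INVAL | EXCP_INVAL_LSWX when
 * lswx_overlaps(rd, n, ra) || lswx_overlaps(rd, n, rb) holds. */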
+
+void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
+{
+    glue(do_stsw, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
+{
+    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
+    RETURN();
+}
+#endif
 
-PPC_OP(glue(stsw, MEMSUFFIX))
+void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
 {
-    glue(do_stsw, MEMSUFFIX)(PARAM(1));
+    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
     RETURN();
 }
 
-PPC_OP(glue(stsw_le, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
 {
-    glue(do_stsw_le, MEMSUFFIX)(PARAM(1));
+    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
     RETURN();
 }
+#endif
 
 /***                         Floating-point store                          ***/
 #define PPC_STF_OP(name, op)                                                  \
-PPC_OP(glue(glue(st, name), MEMSUFFIX))                                       \
+void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
+{                                                                             \
+    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
+    RETURN();                                                                 \
+}
+
+#if defined(TARGET_PPC64)
+#define PPC_STF_OP_64(name, op)                                               \
+void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
 {                                                                             \
-    glue(op, MEMSUFFIX)(T0, FT0);                                             \
+    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
     RETURN();                                                                 \
 }
+#endif
 
 PPC_STF_OP(fd, stfq);
 PPC_STF_OP(fs, stfl);
+#if defined(TARGET_PPC64)
+PPC_STF_OP_64(fd, stfq);
+PPC_STF_OP_64(fs, stfl);
+#endif
 
 static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
 {
@@ -221 +436 @@
 
 PPC_STF_OP(fd_le, stfqr);
 PPC_STF_OP(fs_le, stflr);
+#if defined(TARGET_PPC64)
+PPC_STF_OP_64(fd_le, stfqr);
+PPC_STF_OP_64(fs_le, stflr);
+#endif
 
 /***                         Floating-point load                           ***/
 #define PPC_LDF_OP(name, op)                                                  \
-PPC_OP(glue(glue(l, name), MEMSUFFIX))                                        \
+void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
+{                                                                             \
+    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
+    RETURN();                                                                 \
+}
+
+#if defined(TARGET_PPC64)
+#define PPC_LDF_OP_64(name, op)                                               \
+void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
 {                                                                             \
-    FT0 = glue(op, MEMSUFFIX)(T0);                                            \
+    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
     RETURN();                                                                 \
 }
+#endif
 
 PPC_LDF_OP(fd, ldfq);
 PPC_LDF_OP(fs, ldfl);
+#if defined(TARGET_PPC64)
+PPC_LDF_OP_64(fd, ldfq);
+PPC_LDF_OP_64(fs, ldfl);
+#endif
 
 static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
 {
@@ -271 +503 @@
 
 PPC_LDF_OP(fd_le, ldfqr);
 PPC_LDF_OP(fs_le, ldflr);
+#if defined(TARGET_PPC64)
+PPC_LDF_OP_64(fd_le, ldfqr);
+PPC_LDF_OP_64(fs_le, ldflr);
+#endif
 
 /* Load and set reservation */
-PPC_OP(glue(lwarx, MEMSUFFIX))
+void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
+        regs->reserve = (uint32_t)T0;
+    }
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
+        regs->reserve = (uint64_t)T0;
+    }
+    RETURN();
+}
+
+void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
+        regs->reserve = (uint64_t)T0;
+    }
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
+        regs->reserve = (uint32_t)T0;
+    }
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
 {
     if (unlikely(T0 & 0x03)) {
         do_raise_exception(EXCP_ALIGN);
     } else {
-        T1 = glue(ldl, MEMSUFFIX)(T0);
-        regs->reserve = T0;
+        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
+        regs->reserve = (uint64_t)T0;
     }
     RETURN();
 }
 
-PPC_OP(glue(lwarx_le, MEMSUFFIX))
+void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
 {
     if (unlikely(T0 & 0x03)) {
         do_raise_exception(EXCP_ALIGN);
     } else {
-        T1 = glue(ld32r, MEMSUFFIX)(T0);
-        regs->reserve = T0;
+        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
+        regs->reserve = (uint64_t)T0;
     }
     RETURN();
 }
+#endif
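
Every *arx variant above follows the same shape: test T0 & 0x03 for alignment, load the value, and record the reservation address for the matching store-conditional. A condensed sketch of that pattern (lwarx_sketch and its parameters are illustrative; returning 0 stands in for do_raise_exception(EXCP_ALIGN)):

#include <stdint.h>

static uint32_t lwarx_sketch(uint64_t *reserve, const uint32_t *mem, uint64_t ea)
{
    if (ea & 0x03) {
        return 0;               /* the real ops raise EXCP_ALIGN here */
    }
    *reserve = ea;              /* remembered until st*cx. clears it */
    return mem[ea / 4];         /* the word actually loaded */
}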
 
 /* Store with reservation */
-PPC_OP(glue(stwcx, MEMSUFFIX))
+void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        if (unlikely(regs->reserve != (uint32_t)T0)) {
+            env->crf[0] = xer_ov;
+        } else {
+            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
+            env->crf[0] = xer_ov | 0x02;
+        }
+    }
+    regs->reserve = -1;
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        if (unlikely(regs->reserve != (uint64_t)T0)) {
+            env->crf[0] = xer_ov;
+        } else {
+            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
+            env->crf[0] = xer_ov | 0x02;
+        }
+    }
+    regs->reserve = -1;
+    RETURN();
+}
+
+void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
 {
     if (unlikely(T0 & 0x03)) {
         do_raise_exception(EXCP_ALIGN);
     } else {
-        if (unlikely(regs->reserve != T0)) {
+        if (unlikely(regs->reserve != (uint64_t)T0)) {
             env->crf[0] = xer_ov;
         } else {
-            glue(stl, MEMSUFFIX)(T0, T1);
+            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
+            env->crf[0] = xer_ov | 0x02;
+        }
+    }
+    regs->reserve = -1;
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        if (unlikely(regs->reserve != (uint32_t)T0)) {
+            env->crf[0] = xer_ov;
+        } else {
+            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
             env->crf[0] = xer_ov | 0x02;
         }
     }
@@ -312 +646 @@
     RETURN();
 }
 
-PPC_OP(glue(stwcx_le, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
 {
     if (unlikely(T0 & 0x03)) {
         do_raise_exception(EXCP_ALIGN);
     } else {
-        if (unlikely(regs->reserve != T0)) {
+        if (unlikely(regs->reserve != (uint64_t)T0)) {
             env->crf[0] = xer_ov;
         } else {
-            glue(st32r, MEMSUFFIX)(T0, T1);
+            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
             env->crf[0] = xer_ov | 0x02;
         }
     }
@@ -328 +663 @@
     RETURN();
 }
 
-PPC_OP(glue(dcbz, MEMSUFFIX))
+void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
+{
+    if (unlikely(T0 & 0x03)) {
+        do_raise_exception(EXCP_ALIGN);
+    } else {
+        if (unlikely(regs->reserve != (uint64_t)T0)) {
+            env->crf[0] = xer_ov;
+        } else {
+            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
+            env->crf[0] = xer_ov | 0x02;
+        }
+    }
+    regs->reserve = -1;
+    RETURN();
+}
+#endif
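
The st*cx. ops are the other half of that protocol: the store only happens when the reservation still names this address, CR0 bit 0x02 (the EQ bit) reports success, and the reservation is cleared either way. A condensed sketch (names are illustrative; so_bit stands in for the xer_ov value the ops OR in, and the alignment check is omitted):

#include <stdint.h>

static unsigned stwcx_sketch(uint64_t *reserve, uint32_t *mem,
                             uint64_t ea, uint32_t value, unsigned so_bit)
{
    unsigned cr0;

    if (*reserve != ea) {
        cr0 = so_bit;                   /* EQ clear: the conditional store failed */
    } else {
        mem[ea / 4] = value;
        cr0 = so_bit | 0x02;            /* EQ set: the store went through */
    }
    *reserve = (uint64_t)-1;            /* the reservation is consumed either way */
    return cr0;
}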
+
+void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
+{
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
+#if DCACHE_LINE_SIZE == 64
+    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
+#endif
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
 {
-    glue(stl, MEMSUFFIX)(T0 + 0x00, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x04, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x08, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x0C, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x10, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x14, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x18, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x1C, 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
 #if DCACHE_LINE_SIZE == 64
     /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
-    glue(stl, MEMSUFFIX)(T0 + 0x20UL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x24UL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x28UL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x2CUL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x30UL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x34UL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x38UL, 0);
-    glue(stl, MEMSUFFIX)(T0 + 0x3CUL, 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
 #endif
     RETURN();
 }
+#endif
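
op_dcbz and op_dcbz_64 zero one data cache line with unrolled 32-bit stores: 32 bytes by default, 64 when DCACHE_LINE_SIZE is 64. The unrolled sequence is equivalent to this loop (a sketch; store32 stands in for the stl access of the current MEMSUFFIX):

#include <stdint.h>

static void dcbz_sketch(uint64_t ea, unsigned line_size,
                        void (*store32)(uint64_t ea, uint32_t val))
{
    for (unsigned off = 0; off < line_size; off += 4) {
        store32(ea + off, 0);
    }
}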
 
 /* External access */
-PPC_OP(glue(eciwx, MEMSUFFIX))
+void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
+{
+    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
+{
+    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
+    RETURN();
+}
+#endif
+
+void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
+{
+    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
+    RETURN();
+}
+
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
 {
-    T1 = glue(ldl, MEMSUFFIX)(T0);
+    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
     RETURN();
 }
+#endif
 
-PPC_OP(glue(ecowx, MEMSUFFIX))
+void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
 {
-    glue(stl, MEMSUFFIX)(T0, T1);
+    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
     RETURN();
 }
 
-PPC_OP(glue(eciwx_le, MEMSUFFIX))
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
 {
-    T1 = glue(ld32r, MEMSUFFIX)(T0);
+    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
     RETURN();
 }
+#endif
 
-PPC_OP(glue(ecowx_le, MEMSUFFIX))
+void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
 {
-    glue(st32r, MEMSUFFIX)(T0, T1);
+    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
     RETURN();
 }
 
+#if defined(TARGET_PPC64)
+void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
+{
+    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
+    RETURN();
+}
+#endif
+
 /* XXX: those micro-ops need tests ! */
 /* PowerPC 601 specific instructions (POWER bridge) */
 void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
 {
     /* When byte count is 0, do nothing */
-    if (likely(T1 > 0)) {
+    if (likely(T1 != 0)) {
         glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
     }
     RETURN();
