Revision 2374e73e

--- a/target-alpha/helper.h
+++ b/target-alpha/helper.h
@@ -104,23 +104,15 @@
 DEF_HELPER_1(hw_ret, void, i64)
 DEF_HELPER_2(mfpr, i64, int, i64)
 DEF_HELPER_2(mtpr, void, int, i64)
-DEF_HELPER_0(set_alt_mode, void)
-DEF_HELPER_0(restore_mode, void)
-
-DEF_HELPER_1(ld_virt_to_phys, i64, i64)
-DEF_HELPER_1(st_virt_to_phys, i64, i64)
-DEF_HELPER_2(ldl_raw, void, i64, i64)
-DEF_HELPER_2(ldq_raw, void, i64, i64)
-DEF_HELPER_2(ldl_l_raw, void, i64, i64)
-DEF_HELPER_2(ldq_l_raw, void, i64, i64)
-DEF_HELPER_2(ldl_kernel, void, i64, i64)
-DEF_HELPER_2(ldq_kernel, void, i64, i64)
-DEF_HELPER_2(ldl_data, void, i64, i64)
-DEF_HELPER_2(ldq_data, void, i64, i64)
-DEF_HELPER_2(stl_raw, void, i64, i64)
-DEF_HELPER_2(stq_raw, void, i64, i64)
-DEF_HELPER_2(stl_c_raw, i64, i64, i64)
-DEF_HELPER_2(stq_c_raw, i64, i64, i64)
+
+DEF_HELPER_1(ldl_phys, i64, i64)
+DEF_HELPER_1(ldq_phys, i64, i64)
+DEF_HELPER_1(ldl_l_phys, i64, i64)
+DEF_HELPER_1(ldq_l_phys, i64, i64)
+DEF_HELPER_2(stl_phys, void, i64, i64)
+DEF_HELPER_2(stq_phys, void, i64, i64)
+DEF_HELPER_2(stl_c_phys, i64, i64, i64)
+DEF_HELPER_2(stq_c_phys, i64, i64, i64)
 #endif
 
 #include "def-helper.h"
--- a/target-alpha/op_helper.c
+++ b/target-alpha/op_helper.c
@@ -1188,150 +1188,71 @@
 {
     cpu_alpha_mtpr(env, iprn, val, NULL);
 }
-
-void helper_set_alt_mode (void)
-{
-    env->saved_mode = env->ps & 0xC;
-    env->ps = (env->ps & ~0xC) | (env->ipr[IPR_ALT_MODE] & 0xC);
-}
-
-void helper_restore_mode (void)
-{
-    env->ps = (env->ps & ~0xC) | env->saved_mode;
-}
-
 #endif
 
 /*****************************************************************************/
 /* Softmmu support */
 #if !defined (CONFIG_USER_ONLY)
-
-/* XXX: the two following helpers are pure hacks.
- *      Hopefully, we emulate the PALcode, then we should never see
- *      HW_LD / HW_ST instructions.
- */
-uint64_t helper_ld_virt_to_phys (uint64_t virtaddr)
-{
-    uint64_t tlb_addr, physaddr;
-    int index, mmu_idx;
-    void *retaddr;
-
-    mmu_idx = cpu_mmu_index(env);
-    index = (virtaddr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1);
- redo:
-    tlb_addr = env->tlb_table[mmu_idx][index].addr_read;
-    if ((virtaddr & TARGET_PAGE_MASK) ==
-        (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {
-        physaddr = virtaddr + env->tlb_table[mmu_idx][index].addend;
-    } else {
-        /* the page is not in the TLB : fill it */
-        retaddr = GETPC();
-        tlb_fill(virtaddr, 0, mmu_idx, retaddr);
-        goto redo;
-    }
-    return physaddr;
-}
-
-uint64_t helper_st_virt_to_phys (uint64_t virtaddr)
-{
-    uint64_t tlb_addr, physaddr;
-    int index, mmu_idx;
-    void *retaddr;
-
-    mmu_idx = cpu_mmu_index(env);
-    index = (virtaddr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1);
- redo:
-    tlb_addr = env->tlb_table[mmu_idx][index].addr_write;
-    if ((virtaddr & TARGET_PAGE_MASK) ==
-        (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {
-        physaddr = virtaddr + env->tlb_table[mmu_idx][index].addend;
-    } else {
-        /* the page is not in the TLB : fill it */
-        retaddr = GETPC();
-        tlb_fill(virtaddr, 1, mmu_idx, retaddr);
-        goto redo;
-    }
-    return physaddr;
-}
-
-void helper_ldl_raw(uint64_t t0, uint64_t t1)
+uint64_t helper_ldl_phys(uint64_t p)
 {
-    ldl_raw(t1, t0);
+    return (int32_t)ldl_phys(p);
 }
 
-void helper_ldq_raw(uint64_t t0, uint64_t t1)
+uint64_t helper_ldq_phys(uint64_t p)
 {
-    ldq_raw(t1, t0);
+    return ldq_phys(p);
 }
 
-void helper_ldl_l_raw(uint64_t t0, uint64_t t1)
+uint64_t helper_ldl_l_phys(uint64_t p)
 {
-    env->lock = t1;
-    ldl_raw(t1, t0);
+    env->lock_addr = p;
+    return env->lock_value = (int32_t)ldl_phys(p);
 }
 
-void helper_ldq_l_raw(uint64_t t0, uint64_t t1)
+uint64_t helper_ldq_l_phys(uint64_t p)
 {
-    env->lock = t1;
-    ldl_raw(t1, t0);
+    env->lock_addr = p;
+    return env->lock_value = ldl_phys(p);
 }
 
-void helper_ldl_kernel(uint64_t t0, uint64_t t1)
+void helper_stl_phys(uint64_t p, uint64_t v)
 {
-    ldl_kernel(t1, t0);
+    stl_phys(p, v);
 }
 
-void helper_ldq_kernel(uint64_t t0, uint64_t t1)
+void helper_stq_phys(uint64_t p, uint64_t v)
 {
-    ldq_kernel(t1, t0);
+    stq_phys(p, v);
 }
 
-void helper_ldl_data(uint64_t t0, uint64_t t1)
+uint64_t helper_stl_c_phys(uint64_t p, uint64_t v)
 {
-    ldl_data(t1, t0);
-}
+    uint64_t ret = 0;
 
-void helper_ldq_data(uint64_t t0, uint64_t t1)
-{
-    ldq_data(t1, t0);
-}
-
-void helper_stl_raw(uint64_t t0, uint64_t t1)
-{
-    stl_raw(t1, t0);
-}
-
-void helper_stq_raw(uint64_t t0, uint64_t t1)
-{
-    stq_raw(t1, t0);
-}
-
-uint64_t helper_stl_c_raw(uint64_t t0, uint64_t t1)
-{
-    uint64_t ret;
-
-    if (t1 == env->lock) {
-        stl_raw(t1, t0);
-        ret = 0;
-    } else
-        ret = 1;
-
-    env->lock = 1;
+    if (p == env->lock_addr) {
+        int32_t old = ldl_phys(p);
+        if (old == (int32_t)env->lock_value) {
+            stl_phys(p, v);
+            ret = 1;
+        }
+    }
+    env->lock_addr = -1;
 
     return ret;
 }
 
-uint64_t helper_stq_c_raw(uint64_t t0, uint64_t t1)
+uint64_t helper_stq_c_phys(uint64_t p, uint64_t v)
 {
-    uint64_t ret;
+    uint64_t ret = 0;
 
-    if (t1 == env->lock) {
-        stq_raw(t1, t0);
-        ret = 0;
-    } else
-        ret = 1;
-
-    env->lock = 1;
+    if (p == env->lock_addr) {
+        uint64_t old = ldq_phys(p);
+        if (old == env->lock_value) {
+            stq_phys(p, v);
+            ret = 1;
+        }
+    }
+    env->lock_addr = -1;
 
     return ret;
 }
--- a/target-alpha/translate.c
+++ b/target-alpha/translate.c
@@ -2616,27 +2616,26 @@
             switch ((insn >> 12) & 0xF) {
             case 0x0:
                 /* Longword physical access (hw_ldl/p) */
-                gen_helper_ldl_raw(cpu_ir[ra], addr);
+                gen_helper_ldl_phys(cpu_ir[ra], addr);
                 break;
             case 0x1:
                 /* Quadword physical access (hw_ldq/p) */
-                gen_helper_ldq_raw(cpu_ir[ra], addr);
+                gen_helper_ldq_phys(cpu_ir[ra], addr);
                 break;
             case 0x2:
                 /* Longword physical access with lock (hw_ldl_l/p) */
-                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
+                gen_helper_ldl_l_phys(cpu_ir[ra], addr);
                 break;
             case 0x3:
                 /* Quadword physical access with lock (hw_ldq_l/p) */
-                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
+                gen_helper_ldq_l_phys(cpu_ir[ra], addr);
                 break;
             case 0x4:
                 /* Longword virtual PTE fetch (hw_ldl/v) */
-                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
-                break;
+                goto invalid_opc;
             case 0x5:
                 /* Quadword virtual PTE fetch (hw_ldq/v) */
-                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
+                goto invalid_opc;
                 break;
             case 0x6:
                 /* Incpu_ir[ra]id */
@@ -2646,14 +2645,10 @@
                 goto invalid_opc;
             case 0x8:
                 /* Longword virtual access (hw_ldl) */
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_ldl_raw(cpu_ir[ra], addr);
-                break;
+                goto invalid_opc;
             case 0x9:
                 /* Quadword virtual access (hw_ldq) */
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_ldq_raw(cpu_ir[ra], addr);
-                break;
+                goto invalid_opc;
             case 0xA:
                 /* Longword virtual access with protection check (hw_ldl/w) */
                 tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
@@ -2664,33 +2659,19 @@
                 break;
             case 0xC:
                 /* Longword virtual access with alt access mode (hw_ldl/a)*/
-                gen_helper_set_alt_mode();
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_ldl_raw(cpu_ir[ra], addr);
-                gen_helper_restore_mode();
-                break;
+                goto invalid_opc;
             case 0xD:
                 /* Quadword virtual access with alt access mode (hw_ldq/a) */
-                gen_helper_set_alt_mode();
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_ldq_raw(cpu_ir[ra], addr);
-                gen_helper_restore_mode();
-                break;
+                goto invalid_opc;
             case 0xE:
                 /* Longword virtual access with alternate access mode and
-                 * protection checks (hw_ldl/wa)
-                 */
-                gen_helper_set_alt_mode();
-                gen_helper_ldl_data(cpu_ir[ra], addr);
-                gen_helper_restore_mode();
+                   protection checks (hw_ldl/wa) */
+                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, MMU_USER_IDX);
                 break;
             case 0xF:
                 /* Quadword virtual access with alternate access mode and
-                 * protection checks (hw_ldq/wa)
-                 */
-                gen_helper_set_alt_mode();
-                gen_helper_ldq_data(cpu_ir[ra], addr);
-                gen_helper_restore_mode();
+                   protection checks (hw_ldq/wa) */
+                tcg_gen_qemu_ld64(cpu_ir[ra], addr, MMU_USER_IDX);
                 break;
             }
             tcg_temp_free(addr);
@@ -2940,30 +2921,26 @@
             switch ((insn >> 12) & 0xF) {
             case 0x0:
                 /* Longword physical access */
-                gen_helper_stl_raw(val, addr);
+                gen_helper_stl_phys(addr, val);
                 break;
             case 0x1:
                 /* Quadword physical access */
-                gen_helper_stq_raw(val, addr);
+                gen_helper_stq_phys(addr, val);
                 break;
             case 0x2:
                 /* Longword physical access with lock */
-                gen_helper_stl_c_raw(val, val, addr);
+                gen_helper_stl_c_phys(val, addr, val);
                 break;
             case 0x3:
                 /* Quadword physical access with lock */
-                gen_helper_stq_c_raw(val, val, addr);
+                gen_helper_stq_c_phys(val, addr, val);
                 break;
             case 0x4:
                 /* Longword virtual access */
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_stl_raw(val, addr);
-                break;
+                goto invalid_opc;
             case 0x5:
                 /* Quadword virtual access */
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_stq_raw(val, addr);
-                break;
+                goto invalid_opc;
             case 0x6:
                 /* Invalid */
                 goto invalid_opc;
@@ -2984,18 +2961,10 @@
                 goto invalid_opc;
             case 0xC:
                 /* Longword virtual access with alternate access mode */
-                gen_helper_set_alt_mode();
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_stl_raw(val, addr);
-                gen_helper_restore_mode();
-                break;
+                goto invalid_opc;
            case 0xD:
                 /* Quadword virtual access with alternate access mode */
-                gen_helper_set_alt_mode();
-                gen_helper_st_virt_to_phys(addr, addr);
-                gen_helper_stl_raw(val, addr);
-                gen_helper_restore_mode();
-                break;
+                goto invalid_opc;
             case 0xE:
                 /* Invalid */
                 goto invalid_opc;
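
For orientation only (this note is not part of the revision): the new ldl_l_phys/stl_c_phys helpers above replace the old env->lock flag with a lock_addr/lock_value pair, so a store-conditional succeeds only if it targets the locked address and the memory still holds the value seen by the locked load. The standalone C sketch below mirrors that logic under stated assumptions: a plain byte array stands in for guest physical RAM, and ram_read32/ram_write32 are hypothetical stand-ins for QEMU's ldl_phys/stl_phys, so none of this is QEMU code.

/* Minimal model of the lock_addr/lock_value LL/SC scheme (illustrative only). */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint8_t  ram[64];          /* stand-in for guest physical RAM */
static uint64_t lock_addr = -1;   /* -1 means: no outstanding lock */
static uint64_t lock_value;

static uint32_t ram_read32(uint64_t pa)              { uint32_t v; memcpy(&v, ram + pa, 4); return v; }
static void     ram_write32(uint64_t pa, uint32_t v) { memcpy(ram + pa, &v, 4); }

/* ldl_l: remember where we loaded and what we saw there. */
static int64_t load_locked(uint64_t pa)
{
    lock_addr  = pa;
    lock_value = (int32_t)ram_read32(pa);
    return (int64_t)lock_value;
}

/* stl_c: succeed only if the lock still covers this address and the
 * memory still holds the value observed by the locked load; any
 * store-conditional clears the lock. */
static uint64_t store_conditional(uint64_t pa, uint32_t v)
{
    uint64_t ret = 0;
    if (pa == lock_addr && (int32_t)ram_read32(pa) == (int32_t)lock_value) {
        ram_write32(pa, v);
        ret = 1;
    }
    lock_addr = -1;
    return ret;
}

int main(void)
{
    ram_write32(8, 41);
    load_locked(8);
    /* First store-conditional succeeds (prints 1); the second fails
     * (prints 0) because the lock was cleared by the first one. */
    printf("stl_c -> %llu\n", (unsigned long long)store_conditional(8, 42));
    printf("stl_c -> %llu\n", (unsigned long long)store_conditional(8, 43));
    return 0;
}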
