Revision 426f5abc

b/linux-user/main.c
524 524
    return 0;
525 525
}
526 526

  
527
/* Complete a store-exclusive that trapped to EXCP_STREX (user-mode
 * emulation).  The translator (gen_store_exclusive) packed the operand
 * description into env->exclusive_info:
 *   bits [3:0]   log2 of the access size (3 = doubleword)
 *   bits [7:4]   Rd, the status register
 *   bits [11:8]  Rt, the value register
 *   bits [15:12] Rt2, the second value register for strexd
 * Returns non-zero if a memory access faulted; the faulting address is
 * left in env->cp15.c6_data so the caller can deliver a SIGSEGV.
 */
static int do_strex(CPUARMState *env)
{
    uint32_t val;
    int size;
    int rc = 1;      /* strex status: 1 = failed, 0 = stored */
    int segv = 0;
    uint32_t addr;

    /* Serialize against all other CPU threads: this is what makes the
       load/compare/store sequence below atomic.  */
    start_exclusive();
    addr = env->exclusive_addr;
    if (addr != env->exclusive_test) {
        /* Not the address that was marked exclusive.  */
        goto fail;
    }
    size = env->exclusive_info & 0xf;
    switch (size) {
    case 0:
        segv = get_user_u8(val, addr);
        break;
    case 1:
        segv = get_user_u16(val, addr);
        break;
    case 2:
    case 3:
        segv = get_user_u32(val, addr);
        break;
    }
    if (segv) {
        env->cp15.c6_data = addr;
        goto done;
    }
    if (val != env->exclusive_val) {
        /* Memory changed since the load-exclusive.  */
        goto fail;
    }
    if (size == 3) {
        /* Doubleword: also check the high word.  */
        segv = get_user_u32(val, addr + 4);
        if (segv) {
            env->cp15.c6_data = addr + 4;
            goto done;
        }
        if (val != env->exclusive_high) {
            goto fail;
        }
    }
    val = env->regs[(env->exclusive_info >> 8) & 0xf];
    switch (size) {
    case 0:
        segv = put_user_u8(val, addr);
        break;
    case 1:
        segv = put_user_u16(val, addr);
        break;
    case 2:
    case 3:
        segv = put_user_u32(val, addr);
        break;
    }
    if (segv) {
        env->cp15.c6_data = addr;
        goto done;
    }
    if (size == 3) {
        val = env->regs[(env->exclusive_info >> 12) & 0xf];
        /* Fix: the second word of a strexd must be stored at addr + 4
           (as the fault path below already assumes); the original stored
           it to addr, clobbering the first word and never writing the
           high word.  */
        segv = put_user_u32(val, addr + 4);
        if (segv) {
            env->cp15.c6_data = addr + 4;
            goto done;
        }
    }
    rc = 0;
fail:
    env->regs[(env->exclusive_info >> 4) & 0xf] = rc;
done:
    end_exclusive();
    return segv;
}
601

  
527 602
void cpu_loop(CPUARMState *env)
528 603
{
529 604
    int trapnr;
......
717 792
            if (do_kernel_trap(env))
718 793
              goto error;
719 794
            break;
795
        case EXCP_STREX:
796
            if (do_strex(env)) {
797
                addr = env->cp15.c6_data;
798
                goto do_segv;
799
            }
720 800
        default:
721 801
        error:
722 802
            fprintf(stderr, "qemu: unhandled CPU exception 0x%x - aborting\n",
b/target-arm/cpu.h
40 40
#define EXCP_BKPT            7
41 41
#define EXCP_EXCEPTION_EXIT  8   /* Return from v7M exception.  */
42 42
#define EXCP_KERNEL_TRAP     9   /* Jumped to kernel code page.  */
43
#define EXCP_STREX          10
43 44

  
44 45
#define ARMV7M_EXCP_RESET   1
45 46
#define ARMV7M_EXCP_NMI     2
......
180 181

  
181 182
        float_status fp_status;
182 183
    } vfp;
184
    uint32_t exclusive_addr;
185
    uint32_t exclusive_val;
186
    uint32_t exclusive_high;
183 187
#if defined(CONFIG_USER_ONLY)
184
    struct mmon_state *mmon_entry;
185
#else
186
    uint32_t mmon_addr;
188
    uint32_t exclusive_test;
189
    uint32_t exclusive_info;
187 190
#endif
188 191

  
189 192
    /* iwMMXt coprocessor state.  */
b/target-arm/helper.c
470 470
    env->exception_index = -1;
471 471
}
472 472

  
473
/* Structure used to record exclusive memory locations.  */
474
typedef struct mmon_state {
475
    struct mmon_state *next;
476
    CPUARMState *cpu_env;
477
    uint32_t addr;
478
} mmon_state;
479

  
480
/* Chain of current locks.  */
481
static mmon_state* mmon_head = NULL;
482

  
483 473
int cpu_arm_handle_mmu_fault (CPUState *env, target_ulong address, int rw,
484 474
                              int mmu_idx, int is_softmmu)
485 475
{
......
493 483
    return 1;
494 484
}
495 485

  
496
static void allocate_mmon_state(CPUState *env)
497
{
498
    env->mmon_entry = malloc(sizeof (mmon_state));
499
    memset (env->mmon_entry, 0, sizeof (mmon_state));
500
    env->mmon_entry->cpu_env = env;
501
    mmon_head = env->mmon_entry;
502
}
503

  
504
/* Flush any monitor locks for the specified address.  */
505
static void flush_mmon(uint32_t addr)
506
{
507
    mmon_state *mon;
508

  
509
    for (mon = mmon_head; mon; mon = mon->next)
510
      {
511
        if (mon->addr != addr)
512
          continue;
513

  
514
        mon->addr = 0;
515
        break;
516
      }
517
}
518

  
519
/* Mark an address for exclusive access.  */
520
void HELPER(mark_exclusive)(CPUState *env, uint32_t addr)
521
{
522
    if (!env->mmon_entry)
523
        allocate_mmon_state(env);
524
    /* Clear any previous locks.  */
525
    flush_mmon(addr);
526
    env->mmon_entry->addr = addr;
527
}
528

  
529
/* Test if an exclusive address is still exclusive.  Returns zero
530
   if the address is still exclusive.   */
531
uint32_t HELPER(test_exclusive)(CPUState *env, uint32_t addr)
532
{
533
    int res;
534

  
535
    if (!env->mmon_entry)
536
        return 1;
537
    if (env->mmon_entry->addr == addr)
538
        res = 0;
539
    else
540
        res = 1;
541
    flush_mmon(addr);
542
    return res;
543
}
544

  
545
void HELPER(clrex)(CPUState *env)
546
{
547
    if (!(env->mmon_entry && env->mmon_entry->addr))
548
        return;
549
    flush_mmon(env->mmon_entry->addr);
550
}
551

  
552 486
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
553 487
{
554 488
    return addr;
......
1273 1207
    return phys_addr;
1274 1208
}
1275 1209

  
1276
/* Not really implemented.  Need to figure out a sane way of doing this.
1277
   Maybe add generic watchpoint support and use that.  */
1278

  
1279
void HELPER(mark_exclusive)(CPUState *env, uint32_t addr)
1280
{
1281
    env->mmon_addr = addr;
1282
}
1283

  
1284
uint32_t HELPER(test_exclusive)(CPUState *env, uint32_t addr)
1285
{
1286
    return (env->mmon_addr != addr);
1287
}
1288

  
1289
void HELPER(clrex)(CPUState *env)
1290
{
1291
    env->mmon_addr = -1;
1292
}
1293

  
1294 1210
void HELPER(set_cp)(CPUState *env, uint32_t insn, uint32_t val)
1295 1211
{
1296 1212
    int cp_num = (insn >> 8) & 0xf;
b/target-arm/helpers.h
68 68
DEF_HELPER_2(get_r13_banked, i32, env, i32)
69 69
DEF_HELPER_3(set_r13_banked, void, env, i32, i32)
70 70

  
71
DEF_HELPER_2(mark_exclusive, void, env, i32)
72
DEF_HELPER_2(test_exclusive, i32, env, i32)
73
DEF_HELPER_1(clrex, void, env)
74

  
75 71
DEF_HELPER_1(get_user_reg, i32, i32)
76 72
DEF_HELPER_2(set_user_reg, void, i32, i32)
77 73

  
b/target-arm/translate.c
76 76
/* We reuse the same 64-bit temporaries for efficiency.  */
77 77
static TCGv_i64 cpu_V0, cpu_V1, cpu_M0;
78 78
static TCGv_i32 cpu_R[16];
79
static TCGv_i32 cpu_exclusive_addr;
80
static TCGv_i32 cpu_exclusive_val;
81
static TCGv_i32 cpu_exclusive_high;
82
#ifdef CONFIG_USER_ONLY
83
static TCGv_i32 cpu_exclusive_test;
84
static TCGv_i32 cpu_exclusive_info;
85
#endif
79 86

  
80 87
/* FIXME:  These should be removed.  */
81 88
static TCGv cpu_F0s, cpu_F1s;
......
99 106
                                          offsetof(CPUState, regs[i]),
100 107
                                          regnames[i]);
101 108
    }
109
    cpu_exclusive_addr = tcg_global_mem_new_i32(TCG_AREG0,
110
        offsetof(CPUState, exclusive_addr), "exclusive_addr");
111
    cpu_exclusive_val = tcg_global_mem_new_i32(TCG_AREG0,
112
        offsetof(CPUState, exclusive_val), "exclusive_val");
113
    cpu_exclusive_high = tcg_global_mem_new_i32(TCG_AREG0,
114
        offsetof(CPUState, exclusive_high), "exclusive_high");
115
#ifdef CONFIG_USER_ONLY
116
    cpu_exclusive_test = tcg_global_mem_new_i32(TCG_AREG0,
117
        offsetof(CPUState, exclusive_test), "exclusive_test");
118
    cpu_exclusive_info = tcg_global_mem_new_i32(TCG_AREG0,
119
        offsetof(CPUState, exclusive_info), "exclusive_info");
120
#endif
102 121

  
103 122
#define GEN_HELPER 2
104 123
#include "helpers.h"
......
5819 5838
    dead_tmp(tmp);
5820 5839
}
5821 5840

  
5841
/* Load/Store exclusive instructions are implemented by remembering
5842
   the value/address loaded, and seeing if these are the same
5843
   when the store is performed. This should be sufficient to implement
5844
   the architecturally mandated semantics, and avoids having to monitor
5845
   regular stores.
5846

  
5847
   In system emulation mode only one CPU will be running at once, so
5848
   this sequence is effectively atomic.  In user emulation mode we
5849
   throw an exception and handle the atomic operation elsewhere.  */
5850
static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
5851
                               TCGv addr, int size)
5852
{
5853
    TCGv tmp;
5854

  
5855
    switch (size) {
5856
    case 0:
5857
        tmp = gen_ld8u(addr, IS_USER(s));
5858
        break;
5859
    case 1:
5860
        tmp = gen_ld16u(addr, IS_USER(s));
5861
        break;
5862
    case 2:
5863
    case 3:
5864
        tmp = gen_ld32(addr, IS_USER(s));
5865
        break;
5866
    default:
5867
        abort();
5868
    }
5869
    tcg_gen_mov_i32(cpu_exclusive_val, tmp);
5870
    store_reg(s, rt, tmp);
5871
    if (size == 3) {
5872
        tcg_gen_addi_i32(addr, addr, 4);
5873
        tmp = gen_ld32(addr, IS_USER(s));
5874
        tcg_gen_mov_i32(cpu_exclusive_high, tmp);
5875
        store_reg(s, rt2, tmp);
5876
    }
5877
    tcg_gen_mov_i32(cpu_exclusive_addr, addr);
5878
}
5879

  
5880
/* CLREX: abandon any outstanding exclusive monitor by resetting the
   recorded address to -1, which no subsequent store-exclusive address
   comparison is expected to match.  */
static void gen_clrex(DisasContext *s)
{
    tcg_gen_movi_i32(cpu_exclusive_addr, -1);
}
5884

  
5885
#ifdef CONFIG_USER_ONLY
5886
static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2,
5887
                                TCGv addr, int size)
5888
{
5889
    tcg_gen_mov_i32(cpu_exclusive_test, addr);
5890
    tcg_gen_movi_i32(cpu_exclusive_info,
5891
                     size | (rd << 4) | (rt << 8) | (rt2 << 12));
5892
    gen_set_condexec(s);
5893
    gen_set_pc_im(s->pc - 4);
5894
    gen_exception(EXCP_STREX);
5895
    s->is_jmp = DISAS_JUMP;
5896
}
5897
#else
5898
static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2,
5899
                                TCGv addr, int size)
5900
{
5901
    TCGv tmp;
5902
    int done_label;
5903
    int fail_label;
5904

  
5905
    /* if (env->exclusive_addr == addr && env->exclusive_val == [addr]) {
5906
         [addr] = {Rt};
5907
         {Rd} = 0;
5908
       } else {
5909
         {Rd} = 1;
5910
       } */
5911
    fail_label = gen_new_label();
5912
    done_label = gen_new_label();
5913
    tcg_gen_brcond_i32(TCG_COND_NE, addr, cpu_exclusive_addr, fail_label);
5914
    switch (size) {
5915
    case 0:
5916
        tmp = gen_ld8u(addr, IS_USER(s));
5917
        break;
5918
    case 1:
5919
        tmp = gen_ld16u(addr, IS_USER(s));
5920
        break;
5921
    case 2:
5922
    case 3:
5923
        tmp = gen_ld32(addr, IS_USER(s));
5924
        break;
5925
    default:
5926
        abort();
5927
    }
5928
    tcg_gen_brcond_i32(TCG_COND_NE, tmp, cpu_exclusive_val, fail_label);
5929
    dead_tmp(tmp);
5930
    if (size == 3) {
5931
        TCGv tmp2 = new_tmp();
5932
        tcg_gen_addi_i32(tmp2, addr, 4);
5933
        tmp = gen_ld32(addr, IS_USER(s));
5934
        dead_tmp(tmp2);
5935
        tcg_gen_brcond_i32(TCG_COND_NE, tmp, cpu_exclusive_high, fail_label);
5936
        dead_tmp(tmp);
5937
    }
5938
    tmp = load_reg(s, rt);
5939
    switch (size) {
5940
    case 0:
5941
        gen_st8(tmp, addr, IS_USER(s));
5942
        break;
5943
    case 1:
5944
        gen_st16(tmp, addr, IS_USER(s));
5945
        break;
5946
    case 2:
5947
    case 3:
5948
        gen_st32(tmp, addr, IS_USER(s));
5949
        break;
5950
    default:
5951
        abort();
5952
    }
5953
    if (size == 3) {
5954
        tcg_gen_addi_i32(addr, addr, 4);
5955
        tmp = load_reg(s, rt2);
5956
        gen_st32(tmp, addr, IS_USER(s));
5957
    }
5958
    tcg_gen_movi_i32(cpu_R[rd], 0);
5959
    tcg_gen_br(done_label);
5960
    gen_set_label(fail_label);
5961
    tcg_gen_movi_i32(cpu_R[rd], 1);
5962
    gen_set_label(done_label);
5963
    tcg_gen_movi_i32(cpu_exclusive_addr, -1);
5964
}
5965
#endif
5966

  
5822 5967
static void disas_arm_insn(CPUState * env, DisasContext *s)
5823 5968
{
5824 5969
    unsigned int cond, insn, val, op1, i, shift, rm, rs, rn, rd, sh;
......
5869 6014
            switch ((insn >> 4) & 0xf) {
5870 6015
            case 1: /* clrex */
5871 6016
                ARCH(6K);
5872
                gen_helper_clrex(cpu_env);
6017
                gen_clrex(s);
5873 6018
                return;
5874 6019
            case 4: /* dsb */
5875 6020
            case 5: /* dmb */
......
6454 6599
                        addr = tcg_temp_local_new_i32();
6455 6600
                        load_reg_var(s, addr, rn);
6456 6601
                        if (insn & (1 << 20)) {
6457
                            gen_helper_mark_exclusive(cpu_env, addr);
6458 6602
                            switch (op1) {
6459 6603
                            case 0: /* ldrex */
6460
                                tmp = gen_ld32(addr, IS_USER(s));
6604
                                gen_load_exclusive(s, rd, 15, addr, 2);
6461 6605
                                break;
6462 6606
                            case 1: /* ldrexd */
6463
                                tmp = gen_ld32(addr, IS_USER(s));
6464
                                store_reg(s, rd, tmp);
6465
                                tcg_gen_addi_i32(addr, addr, 4);
6466
                                tmp = gen_ld32(addr, IS_USER(s));
6467
                                rd++;
6607
                                gen_load_exclusive(s, rd, rd + 1, addr, 3);
6468 6608
                                break;
6469 6609
                            case 2: /* ldrexb */
6470
                                tmp = gen_ld8u(addr, IS_USER(s));
6610
                                gen_load_exclusive(s, rd, 15, addr, 0);
6471 6611
                                break;
6472 6612
                            case 3: /* ldrexh */
6473
                                tmp = gen_ld16u(addr, IS_USER(s));
6613
                                gen_load_exclusive(s, rd, 15, addr, 1);
6474 6614
                                break;
6475 6615
                            default:
6476 6616
                                abort();
6477 6617
                            }
6478
                            store_reg(s, rd, tmp);
6479 6618
                        } else {
6480
                            int label = gen_new_label();
6481 6619
                            rm = insn & 0xf;
6482
                            tmp2 = tcg_temp_local_new_i32();
6483
                            gen_helper_test_exclusive(tmp2, cpu_env, addr);
6484
                            tcg_gen_brcondi_i32(TCG_COND_NE, tmp2, 0, label);
6485
                            tmp = load_reg(s,rm);
6486 6620
                            switch (op1) {
6487 6621
                            case 0:  /*  strex */
6488
                                gen_st32(tmp, addr, IS_USER(s));
6622
                                gen_store_exclusive(s, rd, rm, 15, addr, 2);
6489 6623
                                break;
6490 6624
                            case 1: /*  strexd */
6491
                                gen_st32(tmp, addr, IS_USER(s));
6492
                                tcg_gen_addi_i32(addr, addr, 4);
6493
                                tmp = load_reg(s, rm + 1);
6494
                                gen_st32(tmp, addr, IS_USER(s));
6625
                                gen_store_exclusive(s, rd, rm, rm + 1, addr, 2);
6495 6626
                                break;
6496 6627
                            case 2: /*  strexb */
6497
                                gen_st8(tmp, addr, IS_USER(s));
6628
                                gen_store_exclusive(s, rd, rm, 15, addr, 0);
6498 6629
                                break;
6499 6630
                            case 3: /* strexh */
6500
                                gen_st16(tmp, addr, IS_USER(s));
6631
                                gen_store_exclusive(s, rd, rm, 15, addr, 1);
6501 6632
                                break;
6502 6633
                            default:
6503 6634
                                abort();
6504 6635
                            }
6505
                            gen_set_label(label);
6506
                            tcg_gen_mov_i32(cpu_R[rd], tmp2);
6507
                            tcg_temp_free(tmp2);
6508 6636
                        }
6509 6637
                        tcg_temp_free(addr);
6510 6638
                    } else {
......
7259 7387
                /* Load/store exclusive word.  */
7260 7388
                addr = tcg_temp_local_new();
7261 7389
                load_reg_var(s, addr, rn);
7390
                tcg_gen_addi_i32(addr, addr, (insn & 0xff) << 2);
7262 7391
                if (insn & (1 << 20)) {
7263
                    gen_helper_mark_exclusive(cpu_env, addr);
7264
                    tmp = gen_ld32(addr, IS_USER(s));
7265
                    store_reg(s, rd, tmp);
7392
                    gen_load_exclusive(s, rs, 15, addr, 2);
7266 7393
                } else {
7267
                    int label = gen_new_label();
7268
                    tmp2 = tcg_temp_local_new();
7269
                    gen_helper_test_exclusive(tmp2, cpu_env, addr);
7270
                    tcg_gen_brcondi_i32(TCG_COND_NE, tmp2, 0, label);
7271
                    tmp = load_reg(s, rs);
7272
                    gen_st32(tmp, addr, IS_USER(s));
7273
                    gen_set_label(label);
7274
                    tcg_gen_mov_i32(cpu_R[rd], tmp2);
7275
                    tcg_temp_free(tmp2);
7394
                    gen_store_exclusive(s, rd, rs, 15, addr, 2);
7276 7395
                }
7277 7396
                tcg_temp_free(addr);
7278 7397
            } else if ((insn & (1 << 6)) == 0) {
......
7300 7419
                store_reg(s, 15, tmp);
7301 7420
            } else {
7302 7421
                /* Load/store exclusive byte/halfword/doubleword.  */
7303
                /* ??? These are not really atomic.  However we know
7304
                   we never have multiple CPUs running in parallel,
7305
                   so it is good enough.  */
7422
                ARCH(7);
7306 7423
                op = (insn >> 4) & 0x3;
7424
                if (op == 2) {
7425
                    goto illegal_op;
7426
                }
7307 7427
                addr = tcg_temp_local_new();
7308 7428
                load_reg_var(s, addr, rn);
7309 7429
                if (insn & (1 << 20)) {
7310
                    gen_helper_mark_exclusive(cpu_env, addr);
7311
                    switch (op) {
7312
                    case 0:
7313
                        tmp = gen_ld8u(addr, IS_USER(s));
7314
                        break;
7315
                    case 1:
7316
                        tmp = gen_ld16u(addr, IS_USER(s));
7317
                        break;
7318
                    case 3:
7319
                        tmp = gen_ld32(addr, IS_USER(s));
7320
                        tcg_gen_addi_i32(addr, addr, 4);
7321
                        tmp2 = gen_ld32(addr, IS_USER(s));
7322
                        store_reg(s, rd, tmp2);
7323
                        break;
7324
                    default:
7325
                        goto illegal_op;
7326
                    }
7327
                    store_reg(s, rs, tmp);
7430
                    gen_load_exclusive(s, rs, rd, addr, op);
7328 7431
                } else {
7329
                    int label = gen_new_label();
7330
                    tmp2 = tcg_temp_local_new();
7331
                    gen_helper_test_exclusive(tmp2, cpu_env, addr);
7332
                    tcg_gen_brcondi_i32(TCG_COND_NE, tmp2, 0, label);
7333
                    tmp = load_reg(s, rs);
7334
                    switch (op) {
7335
                    case 0:
7336
                        gen_st8(tmp, addr, IS_USER(s));
7337
                        break;
7338
                    case 1:
7339
                        gen_st16(tmp, addr, IS_USER(s));
7340
                        break;
7341
                    case 3:
7342
                        gen_st32(tmp, addr, IS_USER(s));
7343
                        tcg_gen_addi_i32(addr, addr, 4);
7344
                        tmp = load_reg(s, rd);
7345
                        gen_st32(tmp, addr, IS_USER(s));
7346
                        break;
7347
                    default:
7348
                        goto illegal_op;
7349
                    }
7350
                    gen_set_label(label);
7351
                    tcg_gen_mov_i32(cpu_R[rm], tmp2);
7352
                    tcg_temp_free(tmp2);
7432
                    gen_store_exclusive(s, rm, rs, rd, addr, op);
7353 7433
                }
7354 7434
                tcg_temp_free(addr);
7355 7435
            }
......
7845 7925
                        }
7846 7926
                        break;
7847 7927
                    case 3: /* Special control operations.  */
7928
                        ARCH(7);
7848 7929
                        op = (insn >> 4) & 0xf;
7849 7930
                        switch (op) {
7850 7931
                        case 2: /* clrex */
7851
                            gen_helper_clrex(cpu_env);
7932
                            gen_clrex(s);
7852 7933
                            break;
7853 7934
                        case 4: /* dsb */
7854 7935
                        case 5: /* dmb */
7855 7936
                        case 6: /* isb */
7856 7937
                            /* These execute as NOPs.  */
7857
                            ARCH(7);
7858 7938
                            break;
7859 7939
                        default:
7860 7940
                            goto illegal_op;

Also available in: Unified diff