Revision 426f5abc target-arm/translate.c
b/target-arm/translate.c | ||
---|---|---|
76 | 76 |
/* We reuse the same 64-bit temporaries for efficiency. */ |
77 | 77 |
static TCGv_i64 cpu_V0, cpu_V1, cpu_M0; |
78 | 78 |
static TCGv_i32 cpu_R[16]; |
79 |
static TCGv_i32 cpu_exclusive_addr; |
|
80 |
static TCGv_i32 cpu_exclusive_val; |
|
81 |
static TCGv_i32 cpu_exclusive_high; |
|
82 |
#ifdef CONFIG_USER_ONLY |
|
83 |
static TCGv_i32 cpu_exclusive_test; |
|
84 |
static TCGv_i32 cpu_exclusive_info; |
|
85 |
#endif |
|
79 | 86 |
|
80 | 87 |
/* FIXME: These should be removed. */ |
81 | 88 |
static TCGv cpu_F0s, cpu_F1s; |
... | ... | |
99 | 106 |
offsetof(CPUState, regs[i]), |
100 | 107 |
regnames[i]); |
101 | 108 |
} |
109 |
cpu_exclusive_addr = tcg_global_mem_new_i32(TCG_AREG0, |
|
110 |
offsetof(CPUState, exclusive_addr), "exclusive_addr"); |
|
111 |
cpu_exclusive_val = tcg_global_mem_new_i32(TCG_AREG0, |
|
112 |
offsetof(CPUState, exclusive_val), "exclusive_val"); |
|
113 |
cpu_exclusive_high = tcg_global_mem_new_i32(TCG_AREG0, |
|
114 |
offsetof(CPUState, exclusive_high), "exclusive_high"); |
|
115 |
#ifdef CONFIG_USER_ONLY |
|
116 |
cpu_exclusive_test = tcg_global_mem_new_i32(TCG_AREG0, |
|
117 |
offsetof(CPUState, exclusive_test), "exclusive_test"); |
|
118 |
cpu_exclusive_info = tcg_global_mem_new_i32(TCG_AREG0, |
|
119 |
offsetof(CPUState, exclusive_info), "exclusive_info"); |
|
120 |
#endif |
|
102 | 121 |
|
103 | 122 |
#define GEN_HELPER 2 |
104 | 123 |
#include "helpers.h" |
... | ... | |
5819 | 5838 |
dead_tmp(tmp); |
5820 | 5839 |
} |
5821 | 5840 |
|
5841 |
/* Load/Store exclusive instructions are implemented by remembering |
|
5842 |
the value/address loaded, and seeing if these are the same |
|
5843 |
when the store is performed. This should be is sufficient to implement |
|
5844 |
the architecturally mandated semantics, and avoids having to monitor |
|
5845 |
regular stores. |
|
5846 |
|
|
5847 |
In system emulation mode only one CPU will be running at once, so |
|
5848 |
this sequence is effectively atomic. In user emulation mode we |
|
5849 |
throw an exception and handle the atomic operation elsewhere. */ |
|
5850 |
static void gen_load_exclusive(DisasContext *s, int rt, int rt2, |
|
5851 |
TCGv addr, int size) |
|
5852 |
{ |
|
5853 |
TCGv tmp; |
|
5854 |
|
|
5855 |
switch (size) { |
|
5856 |
case 0: |
|
5857 |
tmp = gen_ld8u(addr, IS_USER(s)); |
|
5858 |
break; |
|
5859 |
case 1: |
|
5860 |
tmp = gen_ld16u(addr, IS_USER(s)); |
|
5861 |
break; |
|
5862 |
case 2: |
|
5863 |
case 3: |
|
5864 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
5865 |
break; |
|
5866 |
default: |
|
5867 |
abort(); |
|
5868 |
} |
|
5869 |
tcg_gen_mov_i32(cpu_exclusive_val, tmp); |
|
5870 |
store_reg(s, rt, tmp); |
|
5871 |
if (size == 3) { |
|
5872 |
tcg_gen_addi_i32(addr, addr, 4); |
|
5873 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
5874 |
tcg_gen_mov_i32(cpu_exclusive_high, tmp); |
|
5875 |
store_reg(s, rt2, tmp); |
|
5876 |
} |
|
5877 |
tcg_gen_mov_i32(cpu_exclusive_addr, addr); |
|
5878 |
} |
|
5879 |
|
|
5880 |
/* Clear the local exclusive monitor: -1 is used as a sentinel
   exclusive_addr value that no strex address will ever match.  */
static void gen_clrex(DisasContext *s)
{
    tcg_gen_movi_i32(cpu_exclusive_addr, -1);
}
|
5884 |
|
|
5885 |
#ifdef CONFIG_USER_ONLY |
|
5886 |
static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2, |
|
5887 |
TCGv addr, int size) |
|
5888 |
{ |
|
5889 |
tcg_gen_mov_i32(cpu_exclusive_test, addr); |
|
5890 |
tcg_gen_movi_i32(cpu_exclusive_info, |
|
5891 |
size | (rd << 4) | (rt << 8) | (rt2 << 12)); |
|
5892 |
gen_set_condexec(s); |
|
5893 |
gen_set_pc_im(s->pc - 4); |
|
5894 |
gen_exception(EXCP_STREX); |
|
5895 |
s->is_jmp = DISAS_JUMP; |
|
5896 |
} |
|
5897 |
#else |
|
5898 |
static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2, |
|
5899 |
TCGv addr, int size) |
|
5900 |
{ |
|
5901 |
TCGv tmp; |
|
5902 |
int done_label; |
|
5903 |
int fail_label; |
|
5904 |
|
|
5905 |
/* if (env->exclusive_addr == addr && env->exclusive_val == [addr]) { |
|
5906 |
[addr] = {Rt}; |
|
5907 |
{Rd} = 0; |
|
5908 |
} else { |
|
5909 |
{Rd} = 1; |
|
5910 |
} */ |
|
5911 |
fail_label = gen_new_label(); |
|
5912 |
done_label = gen_new_label(); |
|
5913 |
tcg_gen_brcond_i32(TCG_COND_NE, addr, cpu_exclusive_addr, fail_label); |
|
5914 |
switch (size) { |
|
5915 |
case 0: |
|
5916 |
tmp = gen_ld8u(addr, IS_USER(s)); |
|
5917 |
break; |
|
5918 |
case 1: |
|
5919 |
tmp = gen_ld16u(addr, IS_USER(s)); |
|
5920 |
break; |
|
5921 |
case 2: |
|
5922 |
case 3: |
|
5923 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
5924 |
break; |
|
5925 |
default: |
|
5926 |
abort(); |
|
5927 |
} |
|
5928 |
tcg_gen_brcond_i32(TCG_COND_NE, tmp, cpu_exclusive_val, fail_label); |
|
5929 |
dead_tmp(tmp); |
|
5930 |
if (size == 3) { |
|
5931 |
TCGv tmp2 = new_tmp(); |
|
5932 |
tcg_gen_addi_i32(tmp2, addr, 4); |
|
5933 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
5934 |
dead_tmp(tmp2); |
|
5935 |
tcg_gen_brcond_i32(TCG_COND_NE, tmp, cpu_exclusive_high, fail_label); |
|
5936 |
dead_tmp(tmp); |
|
5937 |
} |
|
5938 |
tmp = load_reg(s, rt); |
|
5939 |
switch (size) { |
|
5940 |
case 0: |
|
5941 |
gen_st8(tmp, addr, IS_USER(s)); |
|
5942 |
break; |
|
5943 |
case 1: |
|
5944 |
gen_st16(tmp, addr, IS_USER(s)); |
|
5945 |
break; |
|
5946 |
case 2: |
|
5947 |
case 3: |
|
5948 |
gen_st32(tmp, addr, IS_USER(s)); |
|
5949 |
break; |
|
5950 |
default: |
|
5951 |
abort(); |
|
5952 |
} |
|
5953 |
if (size == 3) { |
|
5954 |
tcg_gen_addi_i32(addr, addr, 4); |
|
5955 |
tmp = load_reg(s, rt2); |
|
5956 |
gen_st32(tmp, addr, IS_USER(s)); |
|
5957 |
} |
|
5958 |
tcg_gen_movi_i32(cpu_R[rd], 0); |
|
5959 |
tcg_gen_br(done_label); |
|
5960 |
gen_set_label(fail_label); |
|
5961 |
tcg_gen_movi_i32(cpu_R[rd], 1); |
|
5962 |
gen_set_label(done_label); |
|
5963 |
tcg_gen_movi_i32(cpu_exclusive_addr, -1); |
|
5964 |
} |
|
5965 |
#endif |
|
5966 |
|
|
5822 | 5967 |
static void disas_arm_insn(CPUState * env, DisasContext *s) |
5823 | 5968 |
{ |
5824 | 5969 |
unsigned int cond, insn, val, op1, i, shift, rm, rs, rn, rd, sh; |
... | ... | |
5869 | 6014 |
switch ((insn >> 4) & 0xf) { |
5870 | 6015 |
case 1: /* clrex */ |
5871 | 6016 |
ARCH(6K); |
5872 |
gen_helper_clrex(cpu_env);
|
|
6017 |
gen_clrex(s);
|
|
5873 | 6018 |
return; |
5874 | 6019 |
case 4: /* dsb */ |
5875 | 6020 |
case 5: /* dmb */ |
... | ... | |
6454 | 6599 |
addr = tcg_temp_local_new_i32(); |
6455 | 6600 |
load_reg_var(s, addr, rn); |
6456 | 6601 |
if (insn & (1 << 20)) { |
6457 |
gen_helper_mark_exclusive(cpu_env, addr); |
|
6458 | 6602 |
switch (op1) { |
6459 | 6603 |
case 0: /* ldrex */ |
6460 |
tmp = gen_ld32(addr, IS_USER(s));
|
|
6604 |
gen_load_exclusive(s, rd, 15, addr, 2);
|
|
6461 | 6605 |
break; |
6462 | 6606 |
case 1: /* ldrexd */ |
6463 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
6464 |
store_reg(s, rd, tmp); |
|
6465 |
tcg_gen_addi_i32(addr, addr, 4); |
|
6466 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
6467 |
rd++; |
|
6607 |
gen_load_exclusive(s, rd, rd + 1, addr, 3); |
|
6468 | 6608 |
break; |
6469 | 6609 |
case 2: /* ldrexb */ |
6470 |
tmp = gen_ld8u(addr, IS_USER(s));
|
|
6610 |
gen_load_exclusive(s, rd, 15, addr, 0);
|
|
6471 | 6611 |
break; |
6472 | 6612 |
case 3: /* ldrexh */ |
6473 |
tmp = gen_ld16u(addr, IS_USER(s));
|
|
6613 |
gen_load_exclusive(s, rd, 15, addr, 1);
|
|
6474 | 6614 |
break; |
6475 | 6615 |
default: |
6476 | 6616 |
abort(); |
6477 | 6617 |
} |
6478 |
store_reg(s, rd, tmp); |
|
6479 | 6618 |
} else { |
6480 |
int label = gen_new_label(); |
|
6481 | 6619 |
rm = insn & 0xf; |
6482 |
tmp2 = tcg_temp_local_new_i32(); |
|
6483 |
gen_helper_test_exclusive(tmp2, cpu_env, addr); |
|
6484 |
tcg_gen_brcondi_i32(TCG_COND_NE, tmp2, 0, label); |
|
6485 |
tmp = load_reg(s,rm); |
|
6486 | 6620 |
switch (op1) { |
6487 | 6621 |
case 0: /* strex */ |
6488 |
gen_st32(tmp, addr, IS_USER(s));
|
|
6622 |
gen_store_exclusive(s, rd, rm, 15, addr, 2);
|
|
6489 | 6623 |
break; |
6490 | 6624 |
case 1: /* strexd */ |
6491 |
gen_st32(tmp, addr, IS_USER(s)); |
|
6492 |
tcg_gen_addi_i32(addr, addr, 4); |
|
6493 |
tmp = load_reg(s, rm + 1); |
|
6494 |
gen_st32(tmp, addr, IS_USER(s)); |
|
6625 |
gen_store_exclusive(s, rd, rm, rm + 1, addr, 3); |
|
6495 | 6626 |
break; |
6496 | 6627 |
case 2: /* strexb */ |
6497 |
gen_st8(tmp, addr, IS_USER(s));
|
|
6628 |
gen_store_exclusive(s, rd, rm, 15, addr, 0);
|
|
6498 | 6629 |
break; |
6499 | 6630 |
case 3: /* strexh */ |
6500 |
gen_st16(tmp, addr, IS_USER(s));
|
|
6631 |
gen_store_exclusive(s, rd, rm, 15, addr, 1);
|
|
6501 | 6632 |
break; |
6502 | 6633 |
default: |
6503 | 6634 |
abort(); |
6504 | 6635 |
} |
6505 |
gen_set_label(label); |
|
6506 |
tcg_gen_mov_i32(cpu_R[rd], tmp2); |
|
6507 |
tcg_temp_free(tmp2); |
|
6508 | 6636 |
} |
6509 | 6637 |
tcg_temp_free(addr); |
6510 | 6638 |
} else { |
... | ... | |
7259 | 7387 |
/* Load/store exclusive word. */ |
7260 | 7388 |
addr = tcg_temp_local_new(); |
7261 | 7389 |
load_reg_var(s, addr, rn); |
7390 |
tcg_gen_addi_i32(addr, addr, (insn & 0xff) << 2); |
|
7262 | 7391 |
if (insn & (1 << 20)) { |
7263 |
gen_helper_mark_exclusive(cpu_env, addr); |
|
7264 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
7265 |
store_reg(s, rd, tmp); |
|
7392 |
gen_load_exclusive(s, rs, 15, addr, 2); |
|
7266 | 7393 |
} else { |
7267 |
int label = gen_new_label(); |
|
7268 |
tmp2 = tcg_temp_local_new(); |
|
7269 |
gen_helper_test_exclusive(tmp2, cpu_env, addr); |
|
7270 |
tcg_gen_brcondi_i32(TCG_COND_NE, tmp2, 0, label); |
|
7271 |
tmp = load_reg(s, rs); |
|
7272 |
gen_st32(tmp, addr, IS_USER(s)); |
|
7273 |
gen_set_label(label); |
|
7274 |
tcg_gen_mov_i32(cpu_R[rd], tmp2); |
|
7275 |
tcg_temp_free(tmp2); |
|
7394 |
gen_store_exclusive(s, rd, rs, 15, addr, 2); |
|
7276 | 7395 |
} |
7277 | 7396 |
tcg_temp_free(addr); |
7278 | 7397 |
} else if ((insn & (1 << 6)) == 0) { |
... | ... | |
7300 | 7419 |
store_reg(s, 15, tmp); |
7301 | 7420 |
} else { |
7302 | 7421 |
/* Load/store exclusive byte/halfword/doubleword. */ |
7303 |
/* ??? These are not really atomic. However we know |
|
7304 |
we never have multiple CPUs running in parallel, |
|
7305 |
so it is good enough. */ |
|
7422 |
ARCH(7); |
|
7306 | 7423 |
op = (insn >> 4) & 0x3; |
7424 |
if (op == 2) { |
|
7425 |
goto illegal_op; |
|
7426 |
} |
|
7307 | 7427 |
addr = tcg_temp_local_new(); |
7308 | 7428 |
load_reg_var(s, addr, rn); |
7309 | 7429 |
if (insn & (1 << 20)) { |
7310 |
gen_helper_mark_exclusive(cpu_env, addr); |
|
7311 |
switch (op) { |
|
7312 |
case 0: |
|
7313 |
tmp = gen_ld8u(addr, IS_USER(s)); |
|
7314 |
break; |
|
7315 |
case 1: |
|
7316 |
tmp = gen_ld16u(addr, IS_USER(s)); |
|
7317 |
break; |
|
7318 |
case 3: |
|
7319 |
tmp = gen_ld32(addr, IS_USER(s)); |
|
7320 |
tcg_gen_addi_i32(addr, addr, 4); |
|
7321 |
tmp2 = gen_ld32(addr, IS_USER(s)); |
|
7322 |
store_reg(s, rd, tmp2); |
|
7323 |
break; |
|
7324 |
default: |
|
7325 |
goto illegal_op; |
|
7326 |
} |
|
7327 |
store_reg(s, rs, tmp); |
|
7430 |
gen_load_exclusive(s, rs, rd, addr, op); |
|
7328 | 7431 |
} else { |
7329 |
int label = gen_new_label(); |
|
7330 |
tmp2 = tcg_temp_local_new(); |
|
7331 |
gen_helper_test_exclusive(tmp2, cpu_env, addr); |
|
7332 |
tcg_gen_brcondi_i32(TCG_COND_NE, tmp2, 0, label); |
|
7333 |
tmp = load_reg(s, rs); |
|
7334 |
switch (op) { |
|
7335 |
case 0: |
|
7336 |
gen_st8(tmp, addr, IS_USER(s)); |
|
7337 |
break; |
|
7338 |
case 1: |
|
7339 |
gen_st16(tmp, addr, IS_USER(s)); |
|
7340 |
break; |
|
7341 |
case 3: |
|
7342 |
gen_st32(tmp, addr, IS_USER(s)); |
|
7343 |
tcg_gen_addi_i32(addr, addr, 4); |
|
7344 |
tmp = load_reg(s, rd); |
|
7345 |
gen_st32(tmp, addr, IS_USER(s)); |
|
7346 |
break; |
|
7347 |
default: |
|
7348 |
goto illegal_op; |
|
7349 |
} |
|
7350 |
gen_set_label(label); |
|
7351 |
tcg_gen_mov_i32(cpu_R[rm], tmp2); |
|
7352 |
tcg_temp_free(tmp2); |
|
7432 |
gen_store_exclusive(s, rm, rs, rd, addr, op); |
|
7353 | 7433 |
} |
7354 | 7434 |
tcg_temp_free(addr); |
7355 | 7435 |
} |
... | ... | |
7845 | 7925 |
} |
7846 | 7926 |
break; |
7847 | 7927 |
case 3: /* Special control operations. */ |
7928 |
ARCH(7); |
|
7848 | 7929 |
op = (insn >> 4) & 0xf; |
7849 | 7930 |
switch (op) { |
7850 | 7931 |
case 2: /* clrex */ |
7851 |
gen_helper_clrex(cpu_env);
|
|
7932 |
gen_clrex(s);
|
|
7852 | 7933 |
break; |
7853 | 7934 |
case 4: /* dsb */ |
7854 | 7935 |
case 5: /* dmb */ |
7855 | 7936 |
case 6: /* isb */ |
7856 | 7937 |
/* These execute as NOPs. */ |
7857 |
ARCH(7); |
|
7858 | 7938 |
break; |
7859 | 7939 |
default: |
7860 | 7940 |
goto illegal_op; |
Also available in: Unified diff