Revision cf360a32 — target-ppc/translate.c (diff view, old | new line numbers)
66 | 66 |
static TCGv cpu_ctr; |
67 | 67 |
static TCGv cpu_lr; |
68 | 68 |
static TCGv cpu_xer; |
69 |
static TCGv cpu_reserve; |
|
69 | 70 |
static TCGv_i32 cpu_fpscr; |
70 | 71 |
static TCGv_i32 cpu_access_type; |
71 | 72 |
|
... | ... | |
161 | 162 |
cpu_xer = tcg_global_mem_new(TCG_AREG0, |
162 | 163 |
offsetof(CPUState, xer), "xer"); |
163 | 164 |
|
165 |
cpu_reserve = tcg_global_mem_new(TCG_AREG0, |
|
166 |
offsetof(CPUState, reserve), "reserve"); |
|
167 |
|
|
164 | 168 |
cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0, |
165 | 169 |
offsetof(CPUState, fpscr), "fpscr"); |
166 | 170 |
|
... | ... | |
2468 | 2472 |
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); |
2469 | 2473 |
} |
2470 | 2474 |
|
2475 |
/* Raise an alignment interrupt unless the effective address EA has all of
 * the low-order bits selected by `mask` clear; execution falls through
 * when the access is suitably aligned. */
static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask)
{
    int aligned = gen_new_label();
    TCGv low_bits = tcg_temp_new();
    TCGv_i32 excp, err;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcg_gen_andi_tl(low_bits, EA, mask);
    tcg_gen_brcondi_tl(TCG_COND_EQ, low_bits, 0, aligned);
    excp = tcg_const_i32(POWERPC_EXCP_ALIGN);
    err = tcg_const_i32(0);
    gen_helper_raise_exception_err(excp, err);
    tcg_temp_free_i32(excp);
    tcg_temp_free_i32(err);
    gen_set_label(aligned);
    tcg_temp_free(low_bits);
}
|
2492 |
|
|
2471 | 2493 |
#if defined(TARGET_PPC64) |
2472 | 2494 |
#define _GEN_MEM_FUNCS(name, mode) \ |
2473 | 2495 |
&gen_op_##name##_##mode, \ |
... | ... | |
3220 | 3242 |
GEN_STOP(ctx); |
3221 | 3243 |
} |
3222 | 3244 |
|
3223 |
#define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])() |
|
3224 |
#define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])() |
|
3225 |
static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = { |
|
3226 |
GEN_MEM_FUNCS(lwarx), |
|
3227 |
}; |
|
3228 |
static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = { |
|
3229 |
GEN_MEM_FUNCS(stwcx), |
|
3230 |
}; |
|
3231 |
|
|
3232 | 3245 |
/* lwarx */ |
3233 | 3246 |
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES) |
3234 | 3247 |
{ |
3235 |
/* NIP cannot be restored if the memory exception comes from an helper */ |
|
3236 |
gen_update_nip(ctx, ctx->nip - 4); |
|
3248 |
TCGv t0 = tcg_temp_local_new(); |
|
3237 | 3249 |
gen_set_access_type(ACCESS_RES); |
3238 |
gen_addr_reg_index(cpu_T[0], ctx); |
|
3239 |
op_lwarx(); |
|
3240 |
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); |
|
3250 |
gen_addr_reg_index(t0, ctx); |
|
3251 |
gen_check_align(ctx, t0, 0x03); |
|
3252 |
#if defined(TARGET_PPC64) |
|
3253 |
if (!ctx->sf_mode) |
|
3254 |
tcg_gen_ext32u_tl(t0, t0); |
|
3255 |
#endif |
|
3256 |
gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx); |
|
3257 |
tcg_gen_mov_tl(cpu_reserve, t0); |
|
3258 |
tcg_temp_free(t0); |
|
3241 | 3259 |
} |
3242 | 3260 |
|
3243 | 3261 |
/* stwcx. */ |
3244 | 3262 |
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES) |
3245 | 3263 |
{ |
3246 |
/* NIP cannot be restored if the memory exception comes from an helper */
|
|
3247 |
gen_update_nip(ctx, ctx->nip - 4);
|
|
3264 |
int l1 = gen_new_label();
|
|
3265 |
TCGv t0 = tcg_temp_local_new();
|
|
3248 | 3266 |
gen_set_access_type(ACCESS_RES); |
3249 |
gen_addr_reg_index(cpu_T[0], ctx); |
|
3250 |
tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); |
|
3251 |
op_stwcx(); |
|
3267 |
gen_addr_reg_index(t0, ctx); |
|
3268 |
gen_check_align(ctx, t0, 0x03); |
|
3269 |
#if defined(TARGET_PPC64) |
|
3270 |
if (!ctx->sf_mode) |
|
3271 |
tcg_gen_ext32u_tl(t0, t0); |
|
3272 |
#endif |
|
3273 |
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); |
|
3274 |
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); |
|
3275 |
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); |
|
3276 |
tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); |
|
3277 |
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); |
|
3278 |
gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx); |
|
3279 |
gen_set_label(l1); |
|
3280 |
tcg_gen_movi_tl(cpu_reserve, -1); |
|
3281 |
tcg_temp_free(t0); |
|
3252 | 3282 |
} |
3253 | 3283 |
|
3254 | 3284 |
#if defined(TARGET_PPC64) |
3255 |
#define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])() |
|
3256 |
#define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])() |
|
3257 |
static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = { |
|
3258 |
GEN_MEM_FUNCS(ldarx), |
|
3259 |
}; |
|
3260 |
static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = { |
|
3261 |
GEN_MEM_FUNCS(stdcx), |
|
3262 |
}; |
|
3263 |
|
|
3264 | 3285 |
/* ldarx */ |
3265 | 3286 |
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B) |
3266 | 3287 |
{ |
3267 |
/* NIP cannot be restored if the memory exception comes from an helper */ |
|
3268 |
gen_update_nip(ctx, ctx->nip - 4); |
|
3288 |
TCGv t0 = tcg_temp_local_new(); |
|
3269 | 3289 |
gen_set_access_type(ACCESS_RES); |
3270 |
gen_addr_reg_index(cpu_T[0], ctx); |
|
3271 |
op_ldarx(); |
|
3272 |
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); |
|
3290 |
gen_addr_reg_index(t0, ctx); |
|
3291 |
gen_check_align(ctx, t0, 0x07); |
|
3292 |
if (!ctx->sf_mode) |
|
3293 |
tcg_gen_ext32u_tl(t0, t0); |
|
3294 |
gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx); |
|
3295 |
tcg_gen_mov_tl(cpu_reserve, t0); |
|
3296 |
tcg_temp_free(t0); |
|
3273 | 3297 |
} |
3274 | 3298 |
|
3275 | 3299 |
/* stdcx. */ |
3276 | 3300 |
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B) |
3277 | 3301 |
{ |
3278 |
/* NIP cannot be restored if the memory exception comes from an helper */
|
|
3279 |
gen_update_nip(ctx, ctx->nip - 4);
|
|
3302 |
int l1 = gen_new_label();
|
|
3303 |
TCGv t0 = tcg_temp_local_new();
|
|
3280 | 3304 |
gen_set_access_type(ACCESS_RES); |
3281 |
gen_addr_reg_index(cpu_T[0], ctx); |
|
3282 |
tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); |
|
3283 |
op_stdcx(); |
|
3305 |
gen_addr_reg_index(t0, ctx); |
|
3306 |
gen_check_align(ctx, t0, 0x07); |
|
3307 |
if (!ctx->sf_mode) |
|
3308 |
tcg_gen_ext32u_tl(t0, t0); |
|
3309 |
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); |
|
3310 |
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); |
|
3311 |
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); |
|
3312 |
tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); |
|
3313 |
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); |
|
3314 |
gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx); |
|
3315 |
gen_set_label(l1); |
|
3316 |
tcg_gen_movi_tl(cpu_reserve, -1); |
|
3317 |
tcg_temp_free(t0); |
|
3284 | 3318 |
} |
3285 | 3319 |
#endif /* defined(TARGET_PPC64) */ |
3286 | 3320 |
|
Also available in: Unified diff