Revision aba9d61e target-i386/translate.c
b/target-i386/translate.c | ||
---|---|---|
2231 | 2231 |
#endif |
2232 | 2232 |
} |
2233 | 2233 |
|
2234 |
static void gen_add_A0_im(DisasContext *s, int val) |
|
2235 |
{ |
|
2236 |
#ifdef TARGET_X86_64 |
|
2237 |
if (CODE64(s)) |
|
2238 |
gen_op_addq_A0_im(val); |
|
2239 |
else |
|
2240 |
#endif |
|
2241 |
gen_op_addl_A0_im(val); |
|
2242 |
} |
|
2243 |
|
|
2234 | 2244 |
static GenOpFunc1 *gen_ldq_env_A0[3] = { |
2235 | 2245 |
gen_op_ldq_raw_env_A0, |
2236 | 2246 |
#ifndef CONFIG_USER_ONLY |
... | ... | |
3382 | 3392 |
goto illegal_op; |
3383 | 3393 |
} |
3384 | 3394 |
if (CODE64(s)) { |
3385 |
if (op >= 2 && op <= 5) {
|
|
3395 |
if (op == 2 || op == 4) {
|
|
3386 | 3396 |
/* operand size for jumps is 64 bit */ |
3387 | 3397 |
ot = OT_QUAD; |
3398 |
} else if (op == 3 || op == 5) { |
|
3399 |
/* for far calls, the operand is 16 or 32 bit, even |
|
3400 |
in long mode */ |
|
3401 |
ot = dflag ? OT_LONG : OT_WORD; |
|
3388 | 3402 |
} else if (op == 6) { |
3389 | 3403 |
/* default push size is 64 bit */ |
3390 | 3404 |
ot = dflag ? OT_QUAD : OT_WORD; |
... | ... | |
3425 | 3439 |
break; |
3426 | 3440 |
case 3: /* lcall Ev */ |
3427 | 3441 |
gen_op_ld_T1_A0[ot + s->mem_index](); |
3428 |
gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
|
|
3442 |
gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
|
|
3429 | 3443 |
gen_op_ldu_T0_A0[OT_WORD + s->mem_index](); |
3430 | 3444 |
do_lcall: |
3431 | 3445 |
if (s->pe && !s->vm86) { |
3432 | 3446 |
if (s->cc_op != CC_OP_DYNAMIC) |
3433 | 3447 |
gen_op_set_cc_op(s->cc_op); |
3434 | 3448 |
gen_jmp_im(pc_start - s->cs_base); |
3435 |
gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
|
|
3449 |
gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
|
|
3436 | 3450 |
} else { |
3437 | 3451 |
gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base); |
3438 | 3452 |
} |
... | ... | |
3446 | 3460 |
break; |
3447 | 3461 |
case 5: /* ljmp Ev */ |
3448 | 3462 |
gen_op_ld_T1_A0[ot + s->mem_index](); |
3449 |
gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
|
|
3463 |
gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
|
|
3450 | 3464 |
gen_op_ldu_T0_A0[OT_WORD + s->mem_index](); |
3451 | 3465 |
do_ljmp: |
3452 | 3466 |
if (s->pe && !s->vm86) { |
3453 | 3467 |
if (s->cc_op != CC_OP_DYNAMIC) |
3454 | 3468 |
gen_op_set_cc_op(s->cc_op); |
3455 | 3469 |
gen_jmp_im(pc_start - s->cs_base); |
3456 |
gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
|
|
3470 |
gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
|
|
3457 | 3471 |
} else { |
3458 | 3472 |
gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS])); |
3459 | 3473 |
gen_op_movl_T0_T1(); |
... | ... | |
4043 | 4057 |
goto illegal_op; |
4044 | 4058 |
gen_lea_modrm(s, modrm, ®_addr, &offset_addr); |
4045 | 4059 |
gen_op_ld_T1_A0[ot + s->mem_index](); |
4046 |
gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
|
|
4060 |
gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
|
|
4047 | 4061 |
/* load the segment first to handle exceptions properly */ |
4048 | 4062 |
gen_op_ldu_T0_A0[OT_WORD + s->mem_index](); |
4049 | 4063 |
gen_movl_seg_T0(s, op, pc_start - s->cs_base); |
... | ... | |
5182 | 5196 |
gen_op_into(s->pc - pc_start); |
5183 | 5197 |
break; |
5184 | 5198 |
case 0xf1: /* icebp (undocumented, exits to external debugger) */ |
5199 |
#if 1 |
|
5185 | 5200 |
gen_debug(s, pc_start - s->cs_base); |
5201 |
#else |
|
5202 |
/* start debug */ |
|
5203 |
tb_flush(cpu_single_env); |
|
5204 |
cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM); |
|
5205 |
#endif |
|
5186 | 5206 |
break; |
5187 | 5207 |
case 0xfa: /* cli */ |
5188 | 5208 |
if (!s->vm86) { |
... | ... | |
5363 | 5383 |
} |
5364 | 5384 |
gen_jmp_im(pc_start - s->cs_base); |
5365 | 5385 |
gen_op_sysret(s->dflag); |
5386 |
/* condition codes are modified only in long mode */ |
|
5387 |
if (s->lma) |
|
5388 |
s->cc_op = CC_OP_EFLAGS; |
|
5366 | 5389 |
gen_eob(s); |
5367 | 5390 |
} |
5368 | 5391 |
break; |
... | ... | |
5458 | 5481 |
else |
5459 | 5482 |
gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit)); |
5460 | 5483 |
gen_op_st_T0_A0[OT_WORD + s->mem_index](); |
5461 |
#ifdef TARGET_X86_64 |
|
5462 |
if (CODE64(s)) |
|
5463 |
gen_op_addq_A0_im(2); |
|
5464 |
else |
|
5465 |
#endif |
|
5466 |
gen_op_addl_A0_im(2); |
|
5484 |
gen_add_A0_im(s, 2); |
|
5467 | 5485 |
if (op == 0) |
5468 | 5486 |
gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base)); |
5469 | 5487 |
else |
... | ... | |
5481 | 5499 |
} else { |
5482 | 5500 |
gen_lea_modrm(s, modrm, ®_addr, &offset_addr); |
5483 | 5501 |
gen_op_ld_T1_A0[OT_WORD + s->mem_index](); |
5484 |
#ifdef TARGET_X86_64 |
|
5485 |
if (CODE64(s)) |
|
5486 |
gen_op_addq_A0_im(2); |
|
5487 |
else |
|
5488 |
#endif |
|
5489 |
gen_op_addl_A0_im(2); |
|
5502 |
gen_add_A0_im(s, 2); |
|
5490 | 5503 |
gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index](); |
5491 | 5504 |
if (!s->dflag) |
5492 | 5505 |
gen_op_andl_T0_im(0xffffff); |
Also available in: Unified diff