Revision cc739bb0 target-i386/translate.c

b/target-i386/translate.c
62 62
static TCGv_ptr cpu_env;
63 63
static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
64 64
static TCGv_i32 cpu_cc_op;
65
static TCGv cpu_regs[CPU_NB_REGS];
65 66
/* local temps */
66 67
static TCGv cpu_T[2], cpu_T3;
67 68
/* local register indexes (only used inside old micro ops) */
......
271 272

  
272 273
/* Store t0 into CPU register `reg` at operand size `ot`.
 * The register file lives in TCG globals (cpu_regs[]), so sub-word
 * stores are expressed as mask-and-merge on the whole global rather
 * than as partial memory stores into CPUState.
 */
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    TCGv tmp;

    switch(ot) {
    case OT_BYTE:
        tmp = tcg_temp_new();
        tcg_gen_ext8u_tl(tmp, t0);
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            /* Low-byte register (AL/CL/...): replace bits 0..7. */
            tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xff);
            tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], tmp);
        } else {
            /* High-byte register (AH/CH/DH/BH): replace bits 8..15
               of register reg - 4. */
            tcg_gen_shli_tl(tmp, tmp, 8);
            tcg_gen_andi_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], ~0xff00);
            tcg_gen_or_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], tmp);
        }
        tcg_temp_free(tmp);
        break;
    case OT_WORD:
        /* 16-bit store: replace bits 0..15, preserve the rest. */
        tmp = tcg_temp_new();
        tcg_gen_ext16u_tl(tmp, t0);
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], tmp);
        tcg_temp_free(tmp);
        break;
    default: /* XXX this shouldn't be reached;  abort? */
    case OT_LONG:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], t0);
        break;
#ifdef TARGET_X86_64
    case OT_QUAD:
        tcg_gen_mov_tl(cpu_regs[reg], t0);
        break;
#endif
    }
}
......
314 321

  
315 322
static inline void gen_op_mov_reg_A0(int size, int reg)
316 323
{
324
    TCGv tmp;
325

  
317 326
    switch(size) {
318 327
    case 0:
319
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
328
        tmp = tcg_temp_new();
329
        tcg_gen_ext16u_tl(tmp, cpu_A0);
330
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
331
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], tmp);
332
        tcg_temp_free(tmp);
320 333
        break;
321
#ifdef TARGET_X86_64
334
    default: /* XXX this shouldn't be reached;  abort? */
322 335
    case 1:
323
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
324
        /* high part of register set to zero */
325
        tcg_gen_movi_tl(cpu_tmp0, 0);
326
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
336
        /* For x86_64, this sets the higher half of register to zero.
337
           For i386, this is equivalent to a mov. */
338
        tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
327 339
        break;
328
    default:
340
#ifdef TARGET_X86_64
329 341
    case 2:
330
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
331
        break;
332
#else
333
    default:
334
    case 1:
335
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
342
        tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
336 343
        break;
337 344
#endif
338 345
    }
......
345 352
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
346 353
            goto std_case;
347 354
        } else {
348
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
355
            tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
356
            tcg_gen_ext8u_tl(t0, t0);
349 357
        }
350 358
        break;
351 359
    default:
352 360
    std_case:
353
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
361
        tcg_gen_mov_tl(t0, cpu_regs[reg]);
354 362
        break;
355 363
    }
356 364
}
......
362 370

  
363 371
static inline void gen_op_movl_A0_reg(int reg)
364 372
{
365
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
373
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
366 374
}
367 375

  
368 376
static inline void gen_op_addl_A0_im(int32_t val)
......
404 412
{
405 413
    switch(size) {
406 414
    case 0:
407
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
408
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
409
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
415
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
416
        tcg_gen_ext16u_tl(cpu_tmp0, cpu_tmp0);
417
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
418
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0);
410 419
        break;
411 420
    case 1:
412
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
413
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
414
#ifdef TARGET_X86_64
415
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
416
#endif
417
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
421
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
422
        /* For x86_64, this sets the higher half of register to zero.
423
           For i386, this is equivalent to a nop. */
424
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
425
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
418 426
        break;
419 427
#ifdef TARGET_X86_64
420 428
    case 2:
421
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
422
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
423
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
429
        tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
424 430
        break;
425 431
#endif
426 432
    }
......
430 436
{
431 437
    switch(size) {
432 438
    case 0:
433
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
434
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
435
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
439
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
440
        tcg_gen_ext16u_tl(cpu_tmp0, cpu_tmp0);
441
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
442
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0);
436 443
        break;
437 444
    case 1:
438
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
439
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
440
#ifdef TARGET_X86_64
441
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
442
#endif
443
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
445
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
446
        /* For x86_64, this sets the higher half of register to zero.
447
           For i386, this is equivalent to a nop. */
448
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
449
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
444 450
        break;
445 451
#ifdef TARGET_X86_64
446 452
    case 2:
447
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
448
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
449
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
453
        tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
450 454
        break;
451 455
#endif
452 456
    }
......
459 463

  
460 464
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
461 465
{
462
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
463
    if (shift != 0) 
466
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
467
    if (shift != 0)
464 468
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
465 469
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
466
#ifdef TARGET_X86_64
467
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
468
#endif
470
    /* For x86_64, this sets the higher half of register to zero.
471
       For i386, this is equivalent to a nop. */
472
    tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
469 473
}
470 474

  
471 475
static inline void gen_op_movl_A0_seg(int reg)
......
496 500

  
497 501
static inline void gen_op_movq_A0_reg(int reg)
498 502
{
499
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
503
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
500 504
}
501 505

  
502 506
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
503 507
{
504
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
505
    if (shift != 0) 
508
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
509
    if (shift != 0)
506 510
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
507 511
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
508 512
}
......
701 705

  
702 706
static inline void gen_op_jnz_ecx(int size, int label1)
703 707
{
704
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
708
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
705 709
    gen_extu(size + 1, cpu_tmp0);
706 710
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
707 711
}
708 712

  
709 713
static inline void gen_op_jz_ecx(int size, int label1)
710 714
{
711
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
715
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
712 716
    gen_extu(size + 1, cpu_tmp0);
713 717
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
714 718
}
......
4834 4838
                rm = 0; /* avoid warning */
4835 4839
            }
4836 4840
            label1 = gen_new_label();
4837
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4838
            tcg_gen_sub_tl(t2, t2, t0);
4841
            tcg_gen_sub_tl(t2, cpu_regs[R_EAX], t0);
4839 4842
            gen_extu(ot, t2);
4840 4843
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4841 4844
            if (mod == 3) {
......
5409 5412
            val = ldub_code(s->pc++);
5410 5413
            tcg_gen_movi_tl(cpu_T3, val);
5411 5414
        } else {
5412
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5415
            tcg_gen_mov_tl(cpu_T3, cpu_regs[R_ECX]);
5413 5416
        }
5414 5417
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5415 5418
        break;
......
6317 6320
                /* XXX: specific Intel behaviour ? */
6318 6321
                l1 = gen_new_label();
6319 6322
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
6320
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6323
                tcg_gen_mov_tl(cpu_regs[reg], t0);
6321 6324
                gen_set_label(l1);
6322
                tcg_gen_movi_tl(cpu_tmp0, 0);
6323
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6325
                tcg_gen_ext32u_tl(cpu_regs[reg], cpu_regs[reg]);
6324 6326
            } else
6325 6327
#endif
6326 6328
            {
......
7588 7590
    cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
7589 7591
                                    "cc_tmp");
7590 7592

  
7593
#ifdef TARGET_X86_64
7594
    cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
7595
                                             offsetof(CPUState, regs[R_EAX]), "rax");
7596
    cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
7597
                                             offsetof(CPUState, regs[R_ECX]), "rcx");
7598
    cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
7599
                                             offsetof(CPUState, regs[R_EDX]), "rdx");
7600
    cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
7601
                                             offsetof(CPUState, regs[R_EBX]), "rbx");
7602
    cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
7603
                                             offsetof(CPUState, regs[R_ESP]), "rsp");
7604
    cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
7605
                                             offsetof(CPUState, regs[R_EBP]), "rbp");
7606
    cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
7607
                                             offsetof(CPUState, regs[R_ESI]), "rsi");
7608
    cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
7609
                                             offsetof(CPUState, regs[R_EDI]), "rdi");
7610
    cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
7611
                                         offsetof(CPUState, regs[8]), "r8");
7612
    cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
7613
                                          offsetof(CPUState, regs[9]), "r9");
7614
    cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
7615
                                          offsetof(CPUState, regs[10]), "r10");
7616
    cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
7617
                                          offsetof(CPUState, regs[11]), "r11");
7618
    cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
7619
                                          offsetof(CPUState, regs[12]), "r12");
7620
    cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
7621
                                          offsetof(CPUState, regs[13]), "r13");
7622
    cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
7623
                                          offsetof(CPUState, regs[14]), "r14");
7624
    cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
7625
                                          offsetof(CPUState, regs[15]), "r15");
7626
#else
7627
    cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
7628
                                             offsetof(CPUState, regs[R_EAX]), "eax");
7629
    cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
7630
                                             offsetof(CPUState, regs[R_ECX]), "ecx");
7631
    cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
7632
                                             offsetof(CPUState, regs[R_EDX]), "edx");
7633
    cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
7634
                                             offsetof(CPUState, regs[R_EBX]), "ebx");
7635
    cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
7636
                                             offsetof(CPUState, regs[R_ESP]), "esp");
7637
    cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
7638
                                             offsetof(CPUState, regs[R_EBP]), "ebp");
7639
    cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
7640
                                             offsetof(CPUState, regs[R_ESI]), "esi");
7641
    cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
7642
                                             offsetof(CPUState, regs[R_EDI]), "edi");
7643
#endif
7644

  
7591 7645
    /* register helpers */
7592 7646
#define GEN_HELPER 2
7593 7647
#include "helper.h"

Also available in: Unified diff