Revision 97212c88

--- a/target-i386/translate.c
+++ b/target-i386/translate.c
@@ -252,11 +252,6 @@
     }
 }
 
-static inline void gen_op_movl_T0_0(void)
-{
-    tcg_gen_movi_tl(cpu_T[0], 0);
-}
-
 static inline void gen_op_movl_T0_im(int32_t val)
 {
     tcg_gen_movi_tl(cpu_T[0], val);
@@ -1257,7 +1252,7 @@
     gen_string_movl_A0_EDI(s);
     /* Note: we must do this dummy write first to be restartable in
        case of page fault. */
-    gen_op_movl_T0_0();
+    tcg_gen_movi_tl(cpu_T[0], 0);
     gen_op_st_v(s, ot, cpu_T[0], cpu_A0);
     tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_regs[R_EDX]);
     tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
@@ -3271,7 +3266,7 @@
                 gen_lea_modrm(env, s, modrm);
                 gen_op_ld_v(s, MO_32, cpu_T[0], cpu_A0);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
-                gen_op_movl_T0_0();
+                tcg_gen_movi_tl(cpu_T[0], 0);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
@@ -3286,7 +3281,7 @@
                 gen_lea_modrm(env, s, modrm);
                 gen_ldq_env_A0(s, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_Q(0)));
-                gen_op_movl_T0_0();
+                tcg_gen_movi_tl(cpu_T[0], 0);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
             } else {
@@ -3507,13 +3502,13 @@
             if (is_xmm) {
                 gen_op_movl_T0_im(val);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
-                gen_op_movl_T0_0();
+                tcg_gen_movi_tl(cpu_T[0], 0);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                 op1_offset = offsetof(CPUX86State,xmm_t0);
             } else {
                 gen_op_movl_T0_im(val);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
-                gen_op_movl_T0_0();
+                tcg_gen_movi_tl(cpu_T[0], 0);
                 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                 op1_offset = offsetof(CPUX86State,mmx_t0);
             }
@@ -4716,7 +4711,7 @@
                 xor_zero:
                     /* xor reg, reg optimisation */
                     set_cc_op(s, CC_OP_CLR);
-                    gen_op_movl_T0_0();
+                    tcg_gen_movi_tl(cpu_T[0], 0);
                     gen_op_mov_reg_T0(ot, reg);
                     break;
                 } else {
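
In short, this revision deletes the one-line wrapper gen_op_movl_T0_0() and open-codes its body, a single tcg_gen_movi_tl() call, at every call site. A minimal sketch of the pattern, using only the identifiers that appear in the hunks above (cpu_T is the translator's temporary-register array):

/* Before: call sites zeroed T0 through a trivial static inline helper. */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
    /* ... */
    gen_op_movl_T0_0();              /* zero the T0 temporary */

/* After: the helper is gone and each call site emits the TCG op directly,
   generating the same code without the extra indirection in the source. */
    tcg_gen_movi_tl(cpu_T[0], 0);    /* zero the T0 temporary, no wrapper */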
