Revision b8b6a50b target-i386/translate.c

--- a/target-i386/translate.c
+++ b/target-i386/translate.c
 /* global register indexes */
 static TCGv cpu_env, cpu_T[2], cpu_A0;
 /* local register indexes (only used inside old micro ops) */
-static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;
+static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_tmp3, cpu_ptr0, cpu_ptr1;

 #ifdef TARGET_X86_64
 static int x86_64_hregs;
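
The patch adds a fourth scratch temporary, cpu_tmp3, because the new out helpers need the port and the value live as two independent 32-bit operands at the same time; a single i32 scratch (cpu_tmp2) can no longer carry both. A standalone sketch of that marshalling step (plain C, not the TCG API; the variable names are illustrative):

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the out helper: port and value as separate i32s. */
    static void do_out(uint32_t port, uint32_t val)
    {
        printf("out 0x%04x <- 0x%x\n", port, val);
    }

    int main(void)
    {
        uint64_t t1 = 0xffff03f8;   /* models the target-long holding EDX */
        uint64_t t0 = 0xdeadbeef;   /* models the target-long holding the data */
        uint32_t tmp2 = (uint32_t)t1 & 0xffff; /* trunc_tl_i32 + andi 0xffff */
        uint32_t tmp3 = (uint32_t)t0;          /* trunc_tl_i32 into cpu_tmp3 */
        do_out(tmp2, tmp3);
        return 0;
    }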
......
     },
 };

-static GenOpFunc *gen_op_in_DX_T0[3] = {
-    gen_op_inb_DX_T0,
-    gen_op_inw_DX_T0,
-    gen_op_inl_DX_T0,
+static void *helper_in_func[3] = {
+    helper_inb,
+    helper_inw,
+    helper_inl,
 };

-static GenOpFunc *gen_op_out_DX_T0[3] = {
-    gen_op_outb_DX_T0,
-    gen_op_outw_DX_T0,
-    gen_op_outl_DX_T0,
+static void *helper_out_func[3] = {
+    helper_outb,
+    helper_outw,
+    helper_outl,
 };

-static GenOpFunc *gen_op_in[3] = {
-    gen_op_inb_T0_T1,
-    gen_op_inw_T0_T1,
-    gen_op_inl_T0_T1,
+static void *gen_check_io_func[3] = {
+    helper_check_iob,
+    helper_check_iow,
+    helper_check_iol,
 };

-static GenOpFunc *gen_op_out[3] = {
-    gen_op_outb_T0_T1,
-    gen_op_outw_T0_T1,
-    gen_op_outl_T0_T1,
-};
-
-static GenOpFunc *gen_check_io_T0[3] = {
-    gen_op_check_iob_T0,
-    gen_op_check_iow_T0,
-    gen_op_check_iol_T0,
-};
-
-static GenOpFunc *gen_check_io_DX[3] = {
-    gen_op_check_iob_DX,
-    gen_op_check_iow_DX,
-    gen_op_check_iol_DX,
-};
-
-static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
+static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
+                         uint32_t svm_flags)
 {
+    int state_saved;
+    target_ulong next_eip;
+
+    state_saved = 0;
     if (s->pe && (s->cpl > s->iopl || s->vm86)) {
         if (s->cc_op != CC_OP_DYNAMIC)
             gen_op_set_cc_op(s->cc_op);
         gen_jmp_im(cur_eip);
-        if (use_dx)
-            gen_check_io_DX[ot]();
-        else
-            gen_check_io_T0[ot]();
+        state_saved = 1;
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_helper_0_1(gen_check_io_func[ot],
+                           cpu_tmp2);
+    }
+    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
+        if (!state_saved) {
+            if (s->cc_op != CC_OP_DYNAMIC)
+                gen_op_set_cc_op(s->cc_op);
+            gen_jmp_im(cur_eip);
+            state_saved = 1;
+        }
+        svm_flags |= (1 << (4 + ot));
+        next_eip = s->pc - s->cs_base;
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_helper_0_3(helper_svm_check_io,
+                           cpu_tmp2,
+                           tcg_const_i32(svm_flags),
+                           tcg_const_i32(next_eip - cur_eip));
     }
 }

......
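The gen_check_io rewrite above folds what used to be two separate call sites (the iopl/TSS permission check and the SVM IOIO intercept, formerly gen_svm_check_io) into a single function that syncs cc_op and EIP at most once. A runnable model of that control flow (plain C, not QEMU code; the INTERCEPT_IOIO_PROT bit index is an assumption for the demo):

    #include <stdint.h>
    #include <stdio.h>

    #define INTERCEPT_IOIO_PROT 27   /* assumed bit index, illustration only */

    struct ctx { int pe, cpl, iopl, vm86; uint64_t flags; };

    static void check_io(struct ctx *s, int ot, uint32_t svm_flags)
    {
        int state_saved = 0;
        if (s->pe && (s->cpl > s->iopl || s->vm86)) {
            /* would emit: set cc_op, sync EIP, call helper_check_io{b,w,l} */
            state_saved = 1;
            printf("emit TSS I/O-permission check, ot=%d\n", ot);
        }
        if (s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
            if (!state_saved) {
                /* sync cc_op/EIP only if the first branch did not */
                state_saved = 1;
            }
            svm_flags |= 1u << (4 + ot);  /* operand-size bit, as in the patch */
            printf("emit SVM IOIO intercept check, flags=0x%x\n", svm_flags);
        }
    }

    int main(void)
    {
        struct ctx s = { 1, 3, 0, 0, 1ULL << INTERCEPT_IOIO_PROT };
        check_io(&s, 1, 0);
        return 0;
    }
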
     gen_string_movl_A0_EDI(s);
     gen_op_movl_T0_0();
     gen_op_st_T0_A0(ot + s->mem_index);
-    gen_op_in_DX_T0[ot]();
+    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
+    tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[1]);
+    tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
+    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2);
     gen_op_st_T0_A0(ot + s->mem_index);
     gen_op_movl_T0_Dshift[ot]();
 #ifdef TARGET_X86_64
......
 {
     gen_string_movl_A0_ESI(s);
     gen_op_ld_T0_A0(ot + s->mem_index);
-    gen_op_out_DX_T0[ot]();
+
+    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
+    tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[1]);
+    tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
+    tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[0]);
+    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
+
     gen_op_movl_T0_Dshift[ot]();
 #ifdef TARGET_X86_64
     if (s->aflag == 2) {
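
In both string hunks the port now comes from EDX explicitly: load EDX into T1, truncate to 32 bits, mask to 16, then dispatch through the size-indexed helper table. (The patch declares the tables as void * because tcg_gen_helper takes untyped pointers; typed pointers below are for the standalone demo only, with stub functions in place of the real I/O helpers. Indices 0/1/2 mirror OT_BYTE/OT_WORD/OT_LONG.)

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t my_inb(uint32_t port) { (void)port; return 0xab; }
    static uint32_t my_inw(uint32_t port) { (void)port; return 0xabcd; }
    static uint32_t my_inl(uint32_t port) { (void)port; return 0xabcdef01; }

    static uint32_t (*in_func[3])(uint32_t) = { my_inb, my_inw, my_inl };

    int main(void)
    {
        uint64_t edx = 0xffff03f8;              /* upper bits must be masked */
        uint32_t port = (uint32_t)edx & 0xffff; /* trunc + andi, as above */
        for (int ot = 0; ot < 3; ot++)
            printf("ot=%d: in(0x%04x) = 0x%x\n", ot, port, in_func[ot](port));
        return 0;
    }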
......
         if (s->cc_op != CC_OP_DYNAMIC)
             gen_op_set_cc_op(s->cc_op);
         gen_jmp_im(cur_eip);
-        gen_op_movl_seg_T0(seg_reg);
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2);
         /* abort translation because the addseg value may change or
            because ss32 may change. For R_SS, translation must always
            stop as a special handling must be done to disable hardware
......
     }
 }

-#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
-
-static inline int
-gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
-{
-#if !defined(CONFIG_USER_ONLY)
-    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
-        if (s->cc_op != CC_OP_DYNAMIC)
-            gen_op_set_cc_op(s->cc_op);
-        SVM_movq_T1_im(s->pc - s->cs_base);
-        gen_jmp_im(pc_start - s->cs_base);
-        gen_op_geneflags();
-        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
-        s->cc_op = CC_OP_DYNAMIC;
-        /* FIXME: maybe we could move the io intercept vector to the TB as well
-                  so we know if this is an EOB or not ... let's assume it's not
-                  for now. */
-    }
-#endif
-    return 0;
-}
-
 static inline int svm_is_rep(int prefixes)
 {
     return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
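
With gen_svm_check_io gone, its callers now hand gen_check_io an svm_flags word directly: svm_is_rep contributes the REP bit, the string instructions OR in the string bit, and gen_check_io adds the operand-size bit itself. A sketch of how those EXITINFO1-style bits combine (bit positions follow my reading of the AMD SVM spec and should be treated as assumptions; prefix values are demo-only):

    #include <stdio.h>

    #define PREFIX_REPZ   0x01   /* assumed encoding, for the demo only */
    #define PREFIX_REPNZ  0x02

    #define SVM_IOIO_TYPE_MASK 1 /* bit 0: 1 = IN, 0 = OUT */
    #define SVM_IOIO_STR_BIT   4 /* bit 2: string instruction */

    static int svm_is_rep(int prefixes)
    {
        return (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0; /* bit 3 */
    }

    int main(void)
    {
        int ot = 1; /* word */
        /* what a REP INSW would pass, plus the size bit that
           gen_check_io ORs in by itself: */
        unsigned flags = SVM_IOIO_TYPE_MASK | svm_is_rep(PREFIX_REPZ)
                       | SVM_IOIO_STR_BIT | (1u << (4 + ot));
        printf("EXITINFO1-style flags = 0x%x\n", flags);
        return 0;
    }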
......

 static inline int
 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
-                              uint64_t type, uint64_t param)
+                              uint32_t type, uint64_t param)
 {
     if(!(s->flags & (INTERCEPT_SVM_MASK)))
 	/* no SVM activated */
......
         case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
             if (s->cc_op != CC_OP_DYNAMIC) {
                 gen_op_set_cc_op(s->cc_op);
-                s->cc_op = CC_OP_DYNAMIC;
             }
             gen_jmp_im(pc_start - s->cs_base);
-            SVM_movq_T1_im(param);
-            gen_op_geneflags();
-            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
+            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
+                               tcg_const_i32(type), tcg_const_i64(param));
             /* this is a special case as we do not know if the interception occurs
                so we assume there was none */
             return 0;
......
             if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                 if (s->cc_op != CC_OP_DYNAMIC) {
                     gen_op_set_cc_op(s->cc_op);
-                    s->cc_op = CC_OP_DYNAMIC;
                 }
                 gen_jmp_im(pc_start - s->cs_base);
-                SVM_movq_T1_im(param);
-                gen_op_geneflags();
-                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
+                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
+                                   tcg_const_i32(type), tcg_const_i64(param));
                 /* this is a special case as we do not know if the interception occurs
                    so we assume there was none */
                 return 0;
......
             if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                 if (s->cc_op != CC_OP_DYNAMIC) {
                     gen_op_set_cc_op(s->cc_op);
-		    s->cc_op = CC_OP_EFLAGS;
                 }
                 gen_jmp_im(pc_start - s->cs_base);
-                SVM_movq_T1_im(param);
-                gen_op_geneflags();
-                gen_op_svm_vmexit(type >> 32, type);
+                tcg_gen_helper_0_2(helper_vmexit,
+                                   tcg_const_i32(type), tcg_const_i64(param));
                 /* we can optimize this one so TBs don't get longer
                    than up to vmexit */
                 gen_eob(s);
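
Since type is now a plain uint32_t VMEXIT code, the exit code and the 64-bit parameter reach the helper as materialized constants (tcg_const_i32/tcg_const_i64) instead of being smuggled through T1 by SVM_movq_T1_im. The intercept test itself maps an exit code onto a bit of s->flags; a standalone check with assumed constants (the SVM spec gives INTR exit code 0x60 and intercept bit 0, used here for illustration only):

    #include <stdint.h>
    #include <stdio.h>

    enum { SVM_EXIT_INTR = 0x60, INTERCEPT_INTR = 0 }; /* assumed values */

    static int intercepted(uint64_t flags, uint32_t type)
    {
        return (flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) != 0;
    }

    int main(void)
    {
        uint64_t flags = 1ULL << INTERCEPT_INTR;  /* guest intercepts INTR */
        printf("INTR: %d, next exit code: %d\n",
               intercepted(flags, SVM_EXIT_INTR),
               intercepted(flags, SVM_EXIT_INTR + 1));
        return 0;
    }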
......
         gen_op_st_T0_A0(ot + s->mem_index);
         if (level) {
             /* XXX: must save state */
-            tcg_gen_helper_0_2(helper_enter64_level,
+            tcg_gen_helper_0_3(helper_enter64_level,
                                tcg_const_i32(level),
-                               tcg_const_i32((ot == OT_QUAD)));
+                               tcg_const_i32((ot == OT_QUAD)),
+                               cpu_T[1]);
         }
         gen_op_mov_reg_T1(ot, R_EBP);
         gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
......
         gen_op_st_T0_A0(ot + s->mem_index);
         if (level) {
             /* XXX: must save state */
-            tcg_gen_helper_0_2(helper_enter_level,
+            tcg_gen_helper_0_3(helper_enter_level,
                                tcg_const_i32(level),
-                               tcg_const_i32(s->dflag));
+                               tcg_const_i32(s->dflag),
+                               cpu_T[1]);
         }
         gen_op_mov_reg_T1(ot, R_EBP);
         gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
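
Both ENTER paths bump the helper from two arguments to three so the frame-pointer temporary travels as an explicit TCG value (cpu_T[1]) rather than being read out of a fixed global register inside the helper. A reduced model of the resulting shape (names and prototype are illustrative, not the QEMU declaration):

    #include <stdint.h>
    #include <stdio.h>

    /* level and the size flag arrive as immediates; t1 is the live frame
       pointer that previously lived in a hidden global. */
    static void enter_level(int level, int data64, uint64_t t1)
    {
        printf("ENTER: copy %d frame pointers below %#llx (%s-bit data)\n",
               level, (unsigned long long)t1, data64 ? "64" : "16/32");
    }

    int main(void)
    {
        enter_level(2, 1, 0x7ffffff000ULL);
        return 0;
    }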
......
     } else {
         /* generic MMX or SSE operation */
         switch(b) {
-        case 0xf7:
-            /* maskmov : we must prepare A0 */
-            if (mod != 3)
-                goto illegal_op;
-#ifdef TARGET_X86_64
-            if (s->aflag == 2) {
-                gen_op_movq_A0_reg(R_EDI);
-            } else
-#endif
-            {
-                gen_op_movl_A0_reg(R_EDI);
-                if (s->aflag == 0)
-                    gen_op_andl_A0_ffff();
-            }
-            gen_add_A0_ds_seg(s);
-            break;
         case 0x70: /* pshufx insn */
         case 0xc6: /* pshufx insn */
         case 0xc2: /* compare insns */
......
             tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
             tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
             break;
+        case 0xf7:
+            /* maskmov : we must prepare A0 */
+            if (mod != 3)
+                goto illegal_op;
+#ifdef TARGET_X86_64
+            if (s->aflag == 2) {
+                gen_op_movq_A0_reg(R_EDI);
+            } else
+#endif
+            {
+                gen_op_movl_A0_reg(R_EDI);
+                if (s->aflag == 0)
+                    gen_op_andl_A0_ffff();
+            }
+            gen_add_A0_ds_seg(s);
+
+            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
+            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
+            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
+            break;
         default:
             tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
             tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
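
The 0xf7 (MASKMOV) case moves below the generic operand setup because the store address is now a third helper operand: tcg_gen_helper_0_3 passes cpu_A0 alongside the two register pointers instead of the helper fetching A0 from a global. A standalone model of the masked-store semantics involved (simplified; no real XMM types or fault handling):

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint8_t b[16]; } Reg; /* reduced stand-in for an XMM reg */

    /* Byte i of src is stored to a0+i iff bit 7 of mask byte i is set. */
    static void maskmov(const Reg *src, const Reg *mask, uint64_t a0)
    {
        for (int i = 0; i < 16; i++)
            if (mask->b[i] & 0x80)
                printf("store 0x%02x -> %#llx\n",
                       src->b[i], (unsigned long long)(a0 + i));
    }

    int main(void)
    {
        Reg s = { { 0x11, 0x22, 0x33 } }, m = { { 0x80, 0x00, 0x80 } };
        maskmov(&s, &m, 0x1000);
        return 0;
    }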
......

     /* lock generation */
     if (prefixes & PREFIX_LOCK)
-        gen_op_lock();
+        tcg_gen_helper_0_0(helper_lock);

     /* now check op code */
  reswitch:
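
helper_lock/helper_unlock become ordinary C helpers invoked through tcg_gen_helper_0_0. In user-mode emulation a LOCK prefix has to be serialized in software; a minimal sketch of that idea using a global mutex (pthread is used purely for illustration, not what QEMU's helper does verbatim):

    #include <pthread.h>
    #include <stdio.h>

    static pthread_mutex_t global_lock = PTHREAD_MUTEX_INITIALIZER;

    static void helper_lock_model(void)   { pthread_mutex_lock(&global_lock); }
    static void helper_unlock_model(void) { pthread_mutex_unlock(&global_lock); }

    int main(void)
    {
        helper_lock_model();     /* emitted before a LOCK'd read-modify-write */
        puts("locked memory op");
        helper_unlock_model();   /* emitted after it */
        return 0;
    }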
......
                 if (s->cc_op != CC_OP_DYNAMIC)
                     gen_op_set_cc_op(s->cc_op);
                 gen_jmp_im(pc_start - s->cs_base);
-                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
+                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+                tcg_gen_helper_0_4(helper_lcall_protected,
+                                   cpu_tmp2, cpu_T[1],
+                                   tcg_const_i32(dflag),
+                                   tcg_const_i32(s->pc - pc_start));
             } else {
-                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
+                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+                tcg_gen_helper_0_4(helper_lcall_real,
+                                   cpu_tmp2, cpu_T[1],
+                                   tcg_const_i32(dflag),
+                                   tcg_const_i32(s->pc - s->cs_base));
             }
             gen_eob(s);
             break;
......
                 if (s->cc_op != CC_OP_DYNAMIC)
                     gen_op_set_cc_op(s->cc_op);
                 gen_jmp_im(pc_start - s->cs_base);
-                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
+                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+                tcg_gen_helper_0_3(helper_ljmp_protected,
+                                   cpu_tmp2,
+                                   cpu_T[1],
+                                   tcg_const_i32(s->pc - pc_start));
             } else {
                 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
                 gen_op_movl_T0_T1();
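
Far calls and far jumps follow the same conversion recipe: the selector in T0 is truncated to 32 bits, the offset stays in T1 as a target-long, and sizes/return addresses travel as immediate constants. A reduced model of the resulting helper interface (names and types are illustrative, not the QEMU prototypes):

    #include <stdint.h>
    #include <stdio.h>

    static void lcall_protected(uint32_t selector, uint64_t offset,
                                int dflag, uint32_t next_eip)
    {
        printf("far call %04x:%#llx (dflag=%d, return eip=%#x)\n",
               selector & 0xffff, (unsigned long long)offset, dflag, next_eip);
    }

    int main(void)
    {
        lcall_protected(0x0008, 0x401000, 1, 0x1005);
        return 0;
    }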
......
             gen_op_mov_TN_reg(ot, 0, reg);
             /* for xchg, lock is implicit */
             if (!(prefixes & PREFIX_LOCK))
-                gen_op_lock();
+                tcg_gen_helper_0_0(helper_lock);
             gen_op_ld_T1_A0(ot + s->mem_index);
             gen_op_st_T0_A0(ot + s->mem_index);
             if (!(prefixes & PREFIX_LOCK))
-                gen_op_unlock();
+                tcg_gen_helper_0_0(helper_unlock);
             gen_op_mov_reg_T1(ot, reg);
         }
         break;
......
             ot = OT_BYTE;
         else
             ot = dflag ? OT_LONG : OT_WORD;
-        gen_check_io(s, ot, 1, pc_start - s->cs_base);
         gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
         gen_op_andl_T0_ffff();
-        if (gen_svm_check_io(s, pc_start,
-                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
-                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
-            break;
+        gen_check_io(s, ot, pc_start - s->cs_base,
+                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
         if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
             gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
         } else {
......
             ot = OT_BYTE;
         else
             ot = dflag ? OT_LONG : OT_WORD;
-        gen_check_io(s, ot, 1, pc_start - s->cs_base);
         gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
         gen_op_andl_T0_ffff();
-        if (gen_svm_check_io(s, pc_start,
-                             (1 << (4+ot)) | svm_is_rep(prefixes) |
-                             4 | (1 << (7+s->aflag))))
-            break;
+        gen_check_io(s, ot, pc_start - s->cs_base,
+                     svm_is_rep(prefixes) | 4);
         if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
             gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
         } else {
......
             ot = dflag ? OT_LONG : OT_WORD;
         val = ldub_code(s->pc++);
         gen_op_movl_T0_im(val);
-        gen_check_io(s, ot, 0, pc_start - s->cs_base);
-        if (gen_svm_check_io(s, pc_start,
-                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
-                             (1 << (4+ot))))
-            break;
-        gen_op_in[ot]();
+        gen_check_io(s, ot, pc_start - s->cs_base,
+                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2);
         gen_op_mov_reg_T1(ot, R_EAX);
         break;
     case 0xe6:
......
             ot = dflag ? OT_LONG : OT_WORD;
         val = ldub_code(s->pc++);
         gen_op_movl_T0_im(val);
-        gen_check_io(s, ot, 0, pc_start - s->cs_base);
-        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
-                             (1 << (4+ot))))
-            break;
+        gen_check_io(s, ot, pc_start - s->cs_base,
+                     svm_is_rep(prefixes));
         gen_op_mov_TN_reg(ot, 1, R_EAX);
-        gen_op_out[ot]();
+
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
+        tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[1]);
+        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
         break;
     case 0xec:
     case 0xed:
......
             ot = dflag ? OT_LONG : OT_WORD;
         gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
         gen_op_andl_T0_ffff();
-        gen_check_io(s, ot, 0, pc_start - s->cs_base);
-        if (gen_svm_check_io(s, pc_start,
-                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
-                             (1 << (4+ot))))
-            break;
-        gen_op_in[ot]();
+        gen_check_io(s, ot, pc_start - s->cs_base,
+                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2);
         gen_op_mov_reg_T1(ot, R_EAX);
         break;
     case 0xee:
......
             ot = dflag ? OT_LONG : OT_WORD;
         gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
         gen_op_andl_T0_ffff();
-        gen_check_io(s, ot, 0, pc_start - s->cs_base);
-        if (gen_svm_check_io(s, pc_start,
-                             svm_is_rep(prefixes) | (1 << (4+ot))))
-            break;
+        gen_check_io(s, ot, pc_start - s->cs_base,
+                     svm_is_rep(prefixes));
         gen_op_mov_TN_reg(ot, 1, R_EAX);
-        gen_op_out[ot]();
+
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
+        tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
+        tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[1]);
+        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
         break;

         /************************/
......
             if (s->cc_op != CC_OP_DYNAMIC)
                 gen_op_set_cc_op(s->cc_op);
             gen_jmp_im(pc_start - s->cs_base);
-            gen_op_lret_protected(s->dflag, val);
+            tcg_gen_helper_0_2(helper_lret_protected,
+                               tcg_const_i32(s->dflag),
+                               tcg_const_i32(val));
         } else {
             gen_stack_A0(s);
             /* pop offset */
......
             break;
         if (!s->pe) {
             /* real mode */
-            gen_op_iret_real(s->dflag);
+            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
             s->cc_op = CC_OP_EFLAGS;
         } else if (s->vm86) {
             if (s->iopl != 3) {
                 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
             } else {
-                gen_op_iret_real(s->dflag);
+                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                 s->cc_op = CC_OP_EFLAGS;
             }
         } else {
             if (s->cc_op != CC_OP_DYNAMIC)
                 gen_op_set_cc_op(s->cc_op);
             gen_jmp_im(pc_start - s->cs_base);
-            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
+            tcg_gen_helper_0_2(helper_iret_protected,
+                               tcg_const_i32(s->dflag),
+                               tcg_const_i32(s->pc - s->cs_base));
             s->cc_op = CC_OP_EFLAGS;
         }
         gen_eob(s);
......
         gen_op_mov_TN_reg(ot, 0, reg);
         gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
         gen_jmp_im(pc_start - s->cs_base);
+        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
         if (ot == OT_WORD)
-            tcg_gen_helper_0_0(helper_boundw);
+            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2);
         else
-            tcg_gen_helper_0_0(helper_boundl);
+            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2);
         break;
     case 0x1c8 ... 0x1cf: /* bswap reg */
         reg = (b & 7) | REX_B(s);
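
The bound helpers stop relying on implicit globals too: they now receive the bounds-pair address (cpu_A0) and the array index, truncated to 32 bits, as explicit arguments. A standalone model of the 16-bit check itself (signed comparison against the bounds pair; the real helper raises #BR instead of returning a flag):

    #include <stdint.h>
    #include <stdio.h>

    /* bounds[0] = lower, bounds[1] = upper, read from memory at A0 */
    static int boundw_violates(const int16_t bounds[2], int16_t idx)
    {
        return idx < bounds[0] || idx > bounds[1];
    }

    int main(void)
    {
        int16_t b[2] = { 0, 9 };
        printf("idx 5: %d, idx 12: %d\n",
               boundw_violates(b, 5), boundw_violates(b, 12));
        return 0;
    }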
......
                 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
                     break;
                 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
-                gen_op_lmsw_T0();
+                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                 gen_jmp_im(s->pc - s->cs_base);
                 gen_eob(s);
             }
......
             } else {
                 gen_op_mov_TN_reg(ot, 0, rm);
             }
+            gen_op_mov_TN_reg(ot, 1, reg);
             if (s->cc_op != CC_OP_DYNAMIC)
                 gen_op_set_cc_op(s->cc_op);
             gen_op_arpl();
......
                 if (b & 2) {
                     gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                     gen_op_mov_TN_reg(ot, 0, rm);
-                    gen_op_movl_crN_T0(reg);
+                    tcg_gen_helper_0_2(helper_movl_crN_T0,
+                                       tcg_const_i32(reg), cpu_T[0]);
                     gen_jmp_im(s->pc - s->cs_base);
                     gen_eob(s);
                 } else {
                     gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
 #if !defined(CONFIG_USER_ONLY)
                     if (reg == 8)
-                        gen_op_movtl_T0_cr8();
+                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
                     else
 #endif
                         gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
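
Control- and debug-register moves collapse onto one helper each, taking the register index as a constant operand; only the CR8/TPR read keeps a dedicated helper. A reduced model of the single-helper-plus-index shape (no side effects modeled; the values are purely illustrative):

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t cr[9];  /* cr0..cr8, reduced model of CPU state */

    static void movl_crN(int reg, uint64_t val)
    {
        /* the real helper also applies side effects (paging, TPR, ...) */
        cr[reg] = val;
        printf("cr%d <- %#llx\n", reg, (unsigned long long)val);
    }

    int main(void)
    {
        movl_crN(0, 0x80000001ULL);  /* e.g. PG|PE-style bits, demo only */
        movl_crN(4, 0x20ULL);
        return 0;
    }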
......
             if (b & 2) {
                 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                 gen_op_mov_TN_reg(ot, 0, rm);
-                gen_op_movl_drN_T0(reg);
+                tcg_gen_helper_0_2(helper_movl_drN_T0,
+                                   tcg_const_i32(reg), cpu_T[0]);
                 gen_jmp_im(s->pc - s->cs_base);
                 gen_eob(s);
             } else {
......
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
             gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
-            gen_op_clts();
+            tcg_gen_helper_0_0(helper_clts);
             /* abort block because static cpu state changed */
             gen_jmp_im(s->pc - s->cs_base);
             gen_eob(s);
......
     }
     /* lock generation */
     if (s->prefix & PREFIX_LOCK)
-        gen_op_unlock();
+        tcg_gen_helper_0_0(helper_unlock);
     return s->pc;
  illegal_op:
     if (s->prefix & PREFIX_LOCK)
-        gen_op_unlock();
+        tcg_gen_helper_0_0(helper_unlock);
     /* XXX: ensure that no lock was generated */
     gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
     return s->pc;
......
     cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
     cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
 #endif
-#if defined(__i386__)
+#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
+    /* XXX: must be suppressed once there are less fixed registers */
     cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
 #endif
 }
......
 #endif

     cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
-#if !defined(__i386__)
+#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
     cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
 #endif
     cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
+    cpu_tmp3 = tcg_temp_new(TCG_TYPE_I32);
     cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
     cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

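The final two hunks tighten the i386-host special case: pinning the 64-bit cpu_tmp1 into two fixed host registers only makes sense while a target-long fits in a host long, so an x86_64-on-i386 build now falls through to an ordinary temporary, and the new cpu_tmp3 is allocated alongside cpu_tmp2. A compilable illustration of the guard's intent (the macro values are picked for the demo, not taken from a real build):

    #include <stdio.h>

    #define TARGET_LONG_BITS 64  /* e.g. x86_64 guest, assumed for the demo */
    #define HOST_LONG_BITS   32  /* e.g. i386 host */

    int main(void)
    {
    #if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
        puts("pin cpu_tmp1 into fixed host registers (two-reg i64 hack)");
    #else
        puts("allocate cpu_tmp1 as an ordinary I64 temporary");
    #endif
        return 0;
    }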