Revision 4373f3ce target-arm/translate.c

--- a/target-arm/translate.c
+++ b/target-arm/translate.c
@@ -79,6 +79,7 @@
 static TCGv cpu_env;
 /* FIXME:  These should be removed.  */
 static TCGv cpu_T[3];
+static TCGv cpu_F0s, cpu_F1s, cpu_F0d, cpu_F1d;

 /* initialize TCG globals.  */
 void arm_translate_init(void)
@@ -959,16 +960,16 @@
     }
 }

-#define VFP_OP(name)                      \
-static inline void gen_vfp_##name(int dp) \
-{                                         \
-    if (dp)                               \
-        gen_op_vfp_##name##d();           \
-    else                                  \
-        gen_op_vfp_##name##s();           \
+#define VFP_OP2(name)                                                 \
+static inline void gen_vfp_##name(int dp)                             \
+{                                                                     \
+    if (dp)                                                           \
+        gen_helper_vfp_##name##d(cpu_F0d, cpu_F0d, cpu_F1d, cpu_env); \
+    else                                                              \
+        gen_helper_vfp_##name##s(cpu_F0s, cpu_F0s, cpu_F1s, cpu_env); \
 }

-#define VFP_OP1(name)                               \
+#define VFP_OP1i(name)                               \
 static inline void gen_vfp_##name(int dp, int arg)  \
 {                                                   \
     if (dp)                                         \
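For orientation, this is roughly what the new VFP_OP2(add) instantiation further down expands to; a sketch only, not additional diff content, using just the names visible in the macro body above:

    static inline void gen_vfp_add(int dp)
    {
        if (dp)
            /* double precision: result into cpu_F0d from cpu_F0d and cpu_F1d,
               with cpu_env passed so the helper can reach CPU state */
            gen_helper_vfp_addd(cpu_F0d, cpu_F0d, cpu_F1d, cpu_env);
        else
            /* single precision: same pattern on cpu_F0s / cpu_F1s */
            gen_helper_vfp_adds(cpu_F0s, cpu_F0s, cpu_F1s, cpu_env);
    }

The operands are the cpu_F0*/cpu_F1* TCG temporaries declared at the top of this revision, so each helper call replaces one of the old gen_op_vfp_* micro-ops.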
@@ -977,55 +978,141 @@
         gen_op_vfp_##name##s(arg);                  \
 }

-VFP_OP(add)
-VFP_OP(sub)
-VFP_OP(mul)
-VFP_OP(div)
-VFP_OP(neg)
-VFP_OP(abs)
-VFP_OP(sqrt)
-VFP_OP(cmp)
-VFP_OP(cmpe)
-VFP_OP(F1_ld0)
-VFP_OP(uito)
-VFP_OP(sito)
-VFP_OP(toui)
-VFP_OP(touiz)
-VFP_OP(tosi)
-VFP_OP(tosiz)
-VFP_OP1(tosh)
-VFP_OP1(tosl)
-VFP_OP1(touh)
-VFP_OP1(toul)
-VFP_OP1(shto)
-VFP_OP1(slto)
-VFP_OP1(uhto)
-VFP_OP1(ulto)
-
-#undef VFP_OP
-
-static inline void gen_vfp_fconst(int dp, uint32_t val)
+VFP_OP2(add)
+VFP_OP2(sub)
+VFP_OP2(mul)
+VFP_OP2(div)
+
+#undef VFP_OP2
+
+static inline void gen_vfp_abs(int dp)
+{
+    if (dp)
+        gen_helper_vfp_absd(cpu_F0d, cpu_F0d);
+    else
+        gen_helper_vfp_abss(cpu_F0s, cpu_F0s);
+}
+
+static inline void gen_vfp_neg(int dp)
+{
+    if (dp)
+        gen_helper_vfp_negd(cpu_F0d, cpu_F0d);
+    else
+        gen_helper_vfp_negs(cpu_F0s, cpu_F0s);
+}
+
+static inline void gen_vfp_sqrt(int dp)
+{
+    if (dp)
+        gen_helper_vfp_sqrtd(cpu_F0d, cpu_F0d, cpu_env);
+    else
+        gen_helper_vfp_sqrts(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+static inline void gen_vfp_cmp(int dp)
+{
+    if (dp)
+        gen_helper_vfp_cmpd(cpu_F0d, cpu_F1d, cpu_env);
+    else
+        gen_helper_vfp_cmps(cpu_F0s, cpu_F1s, cpu_env);
+}
+
+static inline void gen_vfp_cmpe(int dp)
+{
+    if (dp)
+        gen_helper_vfp_cmped(cpu_F0d, cpu_F1d, cpu_env);
+    else
+        gen_helper_vfp_cmpes(cpu_F0s, cpu_F1s, cpu_env);
+}
+
+static inline void gen_vfp_F1_ld0(int dp)
+{
+    if (dp)
+        tcg_gen_movi_i64(cpu_F0d, 0);
+    else
+        tcg_gen_movi_i32(cpu_F0s, 0);
+}
+
+static inline void gen_vfp_uito(int dp)
+{
+    if (dp)
+        gen_helper_vfp_uitod(cpu_F0d, cpu_F0s, cpu_env);
+    else
+        gen_helper_vfp_uitos(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+static inline void gen_vfp_sito(int dp)
+{
+    if (dp)
+        gen_helper_vfp_uitod(cpu_F0d, cpu_F0s, cpu_env);
+    else
+        gen_helper_vfp_uitos(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+static inline void gen_vfp_toui(int dp)
+{
+    if (dp)
+        gen_helper_vfp_touid(cpu_F0s, cpu_F0d, cpu_env);
+    else
+        gen_helper_vfp_touis(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+static inline void gen_vfp_touiz(int dp)
+{
+    if (dp)
+        gen_helper_vfp_touizd(cpu_F0s, cpu_F0d, cpu_env);
+    else
+        gen_helper_vfp_touizs(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+static inline void gen_vfp_tosi(int dp)
+{
+    if (dp)
+        gen_helper_vfp_tosid(cpu_F0s, cpu_F0d, cpu_env);
+    else
+        gen_helper_vfp_tosis(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+static inline void gen_vfp_tosiz(int dp)
 {
     if (dp)
-        gen_op_vfp_fconstd(val);
+        gen_helper_vfp_tosizd(cpu_F0s, cpu_F0d, cpu_env);
     else
-        gen_op_vfp_fconsts(val);
+        gen_helper_vfp_tosizs(cpu_F0s, cpu_F0s, cpu_env);
+}
+
+#define VFP_GEN_FIX(name) \
+static inline void gen_vfp_##name(int dp, int shift) \
+{ \
+    if (dp) \
+        gen_helper_vfp_##name##d(cpu_F0d, cpu_F0d, tcg_const_i32(shift), cpu_env);\
+    else \
+        gen_helper_vfp_##name##s(cpu_F0s, cpu_F0s, tcg_const_i32(shift), cpu_env);\
 }
+VFP_GEN_FIX(tosh)
+VFP_GEN_FIX(tosl)
+VFP_GEN_FIX(touh)
+VFP_GEN_FIX(toul)
+VFP_GEN_FIX(shto)
+VFP_GEN_FIX(slto)
+VFP_GEN_FIX(uhto)
+VFP_GEN_FIX(ulto)
+#undef VFP_GEN_FIX

 static inline void gen_vfp_ld(DisasContext *s, int dp)
 {
     if (dp)
-        gen_ldst(vfp_ldd, s);
+        tcg_gen_qemu_ld64(cpu_F0d, cpu_T[1], IS_USER(s));
     else
-        gen_ldst(vfp_lds, s);
+        tcg_gen_qemu_ld32u(cpu_F0s, cpu_T[1], IS_USER(s));
 }

 static inline void gen_vfp_st(DisasContext *s, int dp)
 {
     if (dp)
-        gen_ldst(vfp_std, s);
+        tcg_gen_qemu_st64(cpu_F0d, cpu_T[1], IS_USER(s));
     else
-        gen_ldst(vfp_sts, s);
+        tcg_gen_qemu_st32(cpu_F0s, cpu_T[1], IS_USER(s));
 }

 static inline long
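Likewise, a sketch (not diff content) of what the new VFP_GEN_FIX(tosh) instantiation above expands to; the tcg_const_i32(shift) argument is how the fixed-point shift amount reaches the helper:

    static inline void gen_vfp_tosh(int dp, int shift)
    {
        if (dp)
            /* double precision float -> signed 16-bit fixed point in cpu_F0d */
            gen_helper_vfp_toshd(cpu_F0d, cpu_F0d, tcg_const_i32(shift), cpu_env);
        else
            /* single precision variant on cpu_F0s */
            gen_helper_vfp_toshs(cpu_F0s, cpu_F0s, tcg_const_i32(shift), cpu_env);
    }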
@@ -1055,28 +1142,33 @@
 #define NEON_GET_REG(T, reg, n) gen_op_neon_getreg_##T(neon_reg_offset(reg, n))
 #define NEON_SET_REG(T, reg, n) gen_op_neon_setreg_##T(neon_reg_offset(reg, n))

+#define tcg_gen_ld_f32 tcg_gen_ld_i32
+#define tcg_gen_ld_f64 tcg_gen_ld_i64
+#define tcg_gen_st_f32 tcg_gen_st_i32
+#define tcg_gen_st_f64 tcg_gen_st_i64
+
 static inline void gen_mov_F0_vreg(int dp, int reg)
 {
     if (dp)
-        gen_op_vfp_getreg_F0d(vfp_reg_offset(dp, reg));
+        tcg_gen_ld_f64(cpu_F0d, cpu_env, vfp_reg_offset(dp, reg));
     else
-        gen_op_vfp_getreg_F0s(vfp_reg_offset(dp, reg));
+        tcg_gen_ld_f32(cpu_F0s, cpu_env, vfp_reg_offset(dp, reg));
 }

 static inline void gen_mov_F1_vreg(int dp, int reg)
 {
     if (dp)
-        gen_op_vfp_getreg_F1d(vfp_reg_offset(dp, reg));
+        tcg_gen_ld_f64(cpu_F1d, cpu_env, vfp_reg_offset(dp, reg));
     else
-        gen_op_vfp_getreg_F1s(vfp_reg_offset(dp, reg));
+        tcg_gen_ld_f32(cpu_F1s, cpu_env, vfp_reg_offset(dp, reg));
 }

 static inline void gen_mov_vreg_F0(int dp, int reg)
 {
     if (dp)
-        gen_op_vfp_setreg_F0d(vfp_reg_offset(dp, reg));
+        tcg_gen_st_f64(cpu_F0d, cpu_env, vfp_reg_offset(dp, reg));
     else
-        gen_op_vfp_setreg_F0s(vfp_reg_offset(dp, reg));
+        tcg_gen_st_f32(cpu_F0s, cpu_env, vfp_reg_offset(dp, reg));
 }

 #define ARM_CP_RW_BIT	(1 << 20)
@@ -2262,6 +2354,20 @@
 #define VFP_SREG_M(insn) VFP_SREG(insn,  0,  5)
 #define VFP_DREG_M(reg, insn) VFP_DREG(reg, insn,  0,  5)

+/* Move between integer and VFP cores.  */
+static TCGv gen_vfp_mrs(void)
+{
+    TCGv tmp = new_tmp();
+    tcg_gen_mov_i32(tmp, cpu_F0s);
+    return tmp;
+}
+
+static void gen_vfp_msr(TCGv tmp)
+{
+    tcg_gen_mov_i32(cpu_F0s, tmp);
+    dead_tmp(tmp);
+}
+
 static inline int
 vfp_enabled(CPUState * env)
 {
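The two helpers added above carry the temporary-handling convention used by the VMRS/VMSR and two-register transfer hunks further down: gen_vfp_mrs() returns a fresh new_tmp() copy of cpu_F0s, while gen_vfp_msr() copies its argument into cpu_F0s and frees it with dead_tmp(). A hypothetical fragment (names taken only from this revision) sketching that pairing:

    /* vfp -> arm: gen_vfp_mrs() hands back a temp, which is then written
       to the ARM register rd via store_reg() */
    tmp = gen_vfp_mrs();
    store_reg(s, rd, tmp);

    /* arm -> vfp: load_reg() produces a temp; gen_vfp_msr() copies it into
       cpu_F0s and calls dead_tmp() on it */
    tmp = load_reg(s, rn);
    gen_vfp_msr(tmp);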
@@ -2274,6 +2380,7 @@
 {
     uint32_t rd, rn, rm, op, i, n, offset, delta_d, delta_m, bank_mask;
     int dp, veclen;
+    TCGv tmp;

     if (!arm_feature(env, ARM_FEATURE_VFP))
         return 1;
@@ -2396,18 +2503,18 @@

                         switch (rn) {
                         case ARM_VFP_FPSID:
-                            /* VFP2 allows access for FSID from userspace.
+                            /* VFP2 allows access to FSID from userspace.
                                VFP3 restricts all id registers to privileged
                                accesses.  */
                             if (IS_USER(s)
                                 && arm_feature(env, ARM_FEATURE_VFP3))
                                 return 1;
-                            gen_op_vfp_movl_T0_xreg(rn);
+                            tmp = load_cpu_field(vfp.xregs[rn]);
                             break;
                         case ARM_VFP_FPEXC:
                             if (IS_USER(s))
                                 return 1;
-                            gen_op_vfp_movl_T0_xreg(rn);
+                            tmp = load_cpu_field(vfp.xregs[rn]);
                             break;
                         case ARM_VFP_FPINST:
                         case ARM_VFP_FPINST2:
@@ -2415,36 +2522,41 @@
                             if (IS_USER(s)
                                 || arm_feature(env, ARM_FEATURE_VFP3))
                                 return 1;
-                            gen_op_vfp_movl_T0_xreg(rn);
+                            tmp = load_cpu_field(vfp.xregs[rn]);
                             break;
                         case ARM_VFP_FPSCR:
-			    if (rd == 15)
-				gen_op_vfp_movl_T0_fpscr_flags();
-			    else
-				gen_op_vfp_movl_T0_fpscr();
+			    if (rd == 15) {
+                                tmp = load_cpu_field(vfp.xregs[ARM_VFP_FPSCR]);
+                                tcg_gen_andi_i32(tmp, tmp, 0xf0000000);
+                            } else {
+                                tmp = new_tmp();
+                                gen_helper_vfp_get_fpscr(tmp, cpu_env);
+                            }
                             break;
                         case ARM_VFP_MVFR0:
                         case ARM_VFP_MVFR1:
                             if (IS_USER(s)
                                 || !arm_feature(env, ARM_FEATURE_VFP3))
                                 return 1;
-                            gen_op_vfp_movl_T0_xreg(rn);
+                            tmp = load_cpu_field(vfp.xregs[rn]);
                             break;
                         default:
                             return 1;
                         }
                     } else {
                         gen_mov_F0_vreg(0, rn);
-                        gen_op_vfp_mrs();
+                        tmp = gen_vfp_mrs();
                     }
                     if (rd == 15) {
                         /* Set the 4 flag bits in the CPSR.  */
-                        gen_set_nzcv(cpu_T[0]);
-                    } else
-                        gen_movl_reg_T0(s, rd);
+                        gen_set_nzcv(tmp);
+                        dead_tmp(tmp);
+                    } else {
+                        store_reg(s, rd, tmp);
+                    }
                 } else {
                     /* arm->vfp */
-                    gen_movl_T0_reg(s, rd);
+                    tmp = load_reg(s, rd);
                     if (insn & (1 << 21)) {
                         rn >>= 1;
                         /* system register */
                        /* system register */
......
2455 2567
                            /* Writes are ignored.  */
2456 2568
                            break;
2457 2569
                        case ARM_VFP_FPSCR:
2458
                            gen_op_vfp_movl_fpscr_T0();
2570
                            gen_helper_vfp_set_fpscr(cpu_env, tmp);
2571
                            dead_tmp(tmp);
2459 2572
                            gen_lookup_tb(s);
2460 2573
                            break;
2461 2574
                        case ARM_VFP_FPEXC:
2462 2575
                            if (IS_USER(s))
2463 2576
                                return 1;
2464
                            gen_op_vfp_movl_xreg_T0(rn);
2577
                            store_cpu_field(tmp, vfp.xregs[rn]);
2465 2578
                            gen_lookup_tb(s);
2466 2579
                            break;
2467 2580
                        case ARM_VFP_FPINST:
2468 2581
                        case ARM_VFP_FPINST2:
2469
                            gen_op_vfp_movl_xreg_T0(rn);
2582
                            store_cpu_field(tmp, vfp.xregs[rn]);
2470 2583
                            break;
2471 2584
                        default:
2472 2585
                            return 1;
2473 2586
                        }
2474 2587
                    } else {
2475
                        gen_op_vfp_msr();
2588
                        gen_vfp_msr(tmp);
2476 2589
                        gen_mov_vreg_F0(0, rn);
2477 2590
                    }
2478 2591
                }
@@ -2640,14 +2753,15 @@
                         else
                             i |= 0x4000;
                         n |= i << 16;
+                        tcg_gen_movi_i64(cpu_F0d, ((uint64_t)n) << 32);
                     } else {
                         if (i & 0x40)
                             i |= 0x780;
                         else
                             i |= 0x800;
                         n |= i << 19;
+                        tcg_gen_movi_i32(cpu_F0d, ((uint64_t)n) << 32);
                     }
-                    gen_vfp_fconst(dp, n);
                     break;
                 case 15: /* extension space */
                     switch (rn) {
@@ -2678,9 +2792,9 @@
                         break;
                     case 15: /* single<->double conversion */
                         if (dp)
-                            gen_op_vfp_fcvtsd();
+                            gen_helper_vfp_fcvtsd(cpu_F0s, cpu_F0d, cpu_env);
                         else
-                            gen_op_vfp_fcvtds();
+                            gen_helper_vfp_fcvtds(cpu_F0d, cpu_F0s, cpu_env);
                         break;
                     case 16: /* fuito */
                         gen_vfp_uito(dp);
@@ -2814,31 +2928,35 @@
             if (insn & ARM_CP_RW_BIT) {
                 /* vfp->arm */
                 if (dp) {
-                    gen_mov_F0_vreg(1, rm);
-                    gen_op_vfp_mrrd();
-                    gen_movl_reg_T0(s, rd);
-                    gen_movl_reg_T1(s, rn);
+                    gen_mov_F0_vreg(0, rm * 2);
+                    tmp = gen_vfp_mrs();
+                    store_reg(s, rd, tmp);
+                    gen_mov_F0_vreg(0, rm * 2 + 1);
+                    tmp = gen_vfp_mrs();
+                    store_reg(s, rn, tmp);
                 } else {
                     gen_mov_F0_vreg(0, rm);
-                    gen_op_vfp_mrs();
-                    gen_movl_reg_T0(s, rn);
+                    tmp = gen_vfp_mrs();
+                    store_reg(s, rn, tmp);
                     gen_mov_F0_vreg(0, rm + 1);
-                    gen_op_vfp_mrs();
-                    gen_movl_reg_T0(s, rd);
+                    tmp = gen_vfp_mrs();
+                    store_reg(s, rd, tmp);
                 }
             } else {
                 /* arm->vfp */
                 if (dp) {
-                    gen_movl_T0_reg(s, rd);
-                    gen_movl_T1_reg(s, rn);
-                    gen_op_vfp_mdrr();
-                    gen_mov_vreg_F0(1, rm);
+                    tmp = load_reg(s, rd);
+                    gen_vfp_msr(tmp);
+                    gen_mov_vreg_F0(0, rm * 2);
+                    tmp = load_reg(s, rn);
+                    gen_vfp_msr(tmp);
+                    gen_mov_vreg_F0(0, rm * 2 + 1);
                 } else {
-                    gen_movl_T0_reg(s, rn);
-                    gen_op_vfp_msr();
+                    tmp = load_reg(s, rn);
+                    gen_vfp_msr(tmp);
                     gen_mov_vreg_F0(0, rm);
-                    gen_movl_T0_reg(s, rd);
-                    gen_op_vfp_msr();
+                    tmp = load_reg(s, rd);
+                    gen_vfp_msr(tmp);
                     gen_mov_vreg_F0(0, rm + 1);
                 }
             }
@@ -3993,9 +4111,9 @@
             break;
         case 31:
             if (size == 0)
-                gen_op_neon_recps_f32();
+                gen_helper_recps_f32(cpu_T[0], cpu_T[0], cpu_T[1], cpu_env);
            else
-                gen_op_neon_rsqrts_f32();
+                gen_helper_rsqrts_f32(cpu_T[0], cpu_T[0], cpu_T[1], cpu_env);
             break;
         default:
             abort();
@@ -4242,19 +4360,19 @@
             } else if (op == 15 || op == 16) {
                 /* VCVT fixed-point.  */
                 for (pass = 0; pass < (q ? 4 : 2); pass++) {
-                    gen_op_vfp_getreg_F0s(neon_reg_offset(rm, pass));
+                    tcg_gen_ld_f32(cpu_F0s, cpu_env, neon_reg_offset(rm, pass));
                     if (op & 1) {
                         if (u)
-                            gen_op_vfp_ultos(shift);
+                            gen_vfp_ulto(0, shift);
                         else
-                            gen_op_vfp_sltos(shift);
+                            gen_vfp_slto(0, shift);
                     } else {
                         if (u)
-                            gen_op_vfp_touls(shift);
+                            gen_vfp_toul(0, shift);
                         else
-                            gen_op_vfp_tosls(shift);
+                            gen_vfp_tosl(0, shift);
                     }
-                    gen_op_vfp_setreg_F0s(neon_reg_offset(rd, pass));
+                    tcg_gen_st_f32(cpu_F0s, cpu_env, neon_reg_offset(rd, pass));
                 }
             } else {
                 return 1;
@@ -4898,7 +5016,8 @@
                 elementwise:
                     for (pass = 0; pass < (q ? 4 : 2); pass++) {
                         if (op == 30 || op == 31 || op >= 58) {
-                            gen_op_vfp_getreg_F0s(neon_reg_offset(rm, pass));
+                            tcg_gen_ld_f32(cpu_F0s, cpu_env,
+                                           neon_reg_offset(rm, pass));
                         } else {
                             NEON_GET_REG(T0, rm, pass);
                         }
@@ -5041,10 +5160,10 @@
                             gen_op_neon_ceq_f32();
                             break;
                         case 30: /* Float VABS */
-                            gen_op_vfp_abss();
+                            gen_vfp_abs(0);
                             break;
                         case 31: /* Float VNEG */
-                            gen_op_vfp_negs();
+                            gen_vfp_neg(0);
                             break;
                         case 32: /* VSWP */
                             NEON_GET_REG(T1, rd, pass);
@@ -5061,35 +5180,36 @@
                             NEON_SET_REG(T1, rm, pass);
                             break;
                         case 56: /* Integer VRECPE */
-                            gen_op_neon_recpe_u32();
+                            gen_helper_recpe_u32(cpu_T[0], cpu_T[0], cpu_env);
                             break;
                         case 57: /* Integer VRSQRTE */
-                            gen_op_neon_rsqrte_u32();
+                            gen_helper_rsqrte_u32(cpu_T[0], cpu_T[0], cpu_env);
                             break;
                         case 58: /* Float VRECPE */
-                            gen_op_neon_recpe_f32();
+                            gen_helper_recpe_f32(cpu_F0s, cpu_F0s, cpu_env);
                             break;
                         case 59: /* Float VRSQRTE */
-                            gen_op_neon_rsqrte_f32();
+                            gen_helper_rsqrte_f32(cpu_F0s, cpu_F0s, cpu_env);
                             break;
                         case 60: /* VCVT.F32.S32 */
-                            gen_op_vfp_tosizs();
+                            gen_vfp_tosiz(0);
                             break;
                         case 61: /* VCVT.F32.U32 */
-                            gen_op_vfp_touizs();
+                            gen_vfp_touiz(0);
                             break;
                         case 62: /* VCVT.S32.F32 */
-                            gen_op_vfp_sitos();
+                            gen_vfp_sito(0);
                             break;
                         case 63: /* VCVT.U32.F32 */
-                            gen_op_vfp_uitos();
+                            gen_vfp_uito(0);
                             break;
                         default:
                             /* Reserved: 21, 29, 39-56 */
                             return 1;
                         }
                         if (op == 30 || op == 31 || op >= 58) {
-                            gen_op_vfp_setreg_F0s(neon_reg_offset(rm, pass));
+                            tcg_gen_st_f32(cpu_F0s, cpu_env,
+                                           neon_reg_offset(rd, pass));
                         } else {
                             NEON_SET_REG(T0, rd, pass);
                         }
@@ -8062,6 +8182,10 @@
         dc->user = (env->uncached_cpsr & 0x1f) == ARM_CPU_MODE_USR;
     }
 #endif
+    cpu_F0s = tcg_temp_new(TCG_TYPE_I32);
+    cpu_F1s = tcg_temp_new(TCG_TYPE_I32);
+    cpu_F0d = tcg_temp_new(TCG_TYPE_I64);
+    cpu_F1d = tcg_temp_new(TCG_TYPE_I64);
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     lj = -1;
     /* Reset the conditional execution bits immediately. This avoids
