Revision a7812ae4 target-ppc/translate.c

b/target-ppc/translate.c
26 26
#include "cpu.h"
27 27
#include "exec-all.h"
28 28
#include "disas.h"
29
#include "helper.h"
30 29
#include "tcg-op.h"
31 30
#include "qemu-common.h"
32 31

  
32
#include "helper.h"
33
#define GEN_HELPER 1
34
#include "helper.h"
35

  
33 36
#define CPU_SINGLE_STEP 0x1
34 37
#define CPU_BRANCH_STEP 0x2
35 38
#define GDBSTUB_SINGLE_STEP 0x4
......
44 47
/* Code translation helpers                                                  */
45 48

  
46 49
/* global register indexes */
47
static TCGv cpu_env;
50
static TCGv_ptr cpu_env;
48 51
static char cpu_reg_names[10*3 + 22*4 /* GPR */
49 52
#if !defined(TARGET_PPC64)
50 53
    + 10*4 + 22*5 /* SPE GPRh */
......
56 59
#if !defined(TARGET_PPC64)
57 60
static TCGv cpu_gprh[32];
58 61
#endif
59
static TCGv cpu_fpr[32];
60
static TCGv cpu_avrh[32], cpu_avrl[32];
61
static TCGv cpu_crf[8];
62
static TCGv_i64 cpu_fpr[32];
63
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
64
static TCGv_i32 cpu_crf[8];
62 65
static TCGv cpu_nip;
63 66
static TCGv cpu_ctr;
64 67
static TCGv cpu_lr;
65 68
static TCGv cpu_xer;
66
static TCGv cpu_fpscr;
69
static TCGv_i32 cpu_fpscr;
67 70

  
68 71
/* dyngen register indexes */
69 72
static TCGv cpu_T[3];
70 73
#if defined(TARGET_PPC64)
71 74
#define cpu_T64 cpu_T
72 75
#else
73
static TCGv cpu_T64[3];
76
static TCGv_i64 cpu_T64[3];
74 77
#endif
75
static TCGv cpu_FT[3];
76
static TCGv cpu_AVRh[3], cpu_AVRl[3];
78
static TCGv_i64 cpu_FT[3];
79
static TCGv_i64 cpu_AVRh[3], cpu_AVRl[3];
77 80

  
78 81
#include "gen-icount.h"
79 82

  
......
86 89
    if (done_init)
87 90
        return;
88 91

  
89
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
92
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
90 93
#if TARGET_LONG_BITS > HOST_LONG_BITS
91
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
92
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
93
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
94
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
95
    cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
96
                                  TCG_AREG0, offsetof(CPUState, t2), "T2");
94
    cpu_T[0] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t0), "T0");
95
    cpu_T[1] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t1), "T1");
96
    cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
97 97
#else
98
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
99
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
98
    cpu_T[0] = tcg_global_reg_new(TCG_AREG1, "T0");
99
    cpu_T[1] = tcg_global_reg_new(TCG_AREG2, "T1");
100 100
#ifdef HOST_I386
101 101
    /* XXX: This is a temporary workaround for i386.
102 102
     *      On i386 qemu_st32 runs out of registers.
103 103
     *      The proper fix is to remove cpu_T.
104 104
     */
105
    cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
106
                                  TCG_AREG0, offsetof(CPUState, t2), "T2");
105
    cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
107 106
#else
108
    cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
107
    cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2");
109 108
#endif
110 109
#endif
111 110
#if !defined(TARGET_PPC64)
112
    cpu_T64[0] = tcg_global_mem_new(TCG_TYPE_I64,
113
                                    TCG_AREG0, offsetof(CPUState, t0_64),
114
                                    "T0_64");
115
    cpu_T64[1] = tcg_global_mem_new(TCG_TYPE_I64,
116
                                    TCG_AREG0, offsetof(CPUState, t1_64),
117
                                    "T1_64");
118
    cpu_T64[2] = tcg_global_mem_new(TCG_TYPE_I64,
119
                                    TCG_AREG0, offsetof(CPUState, t2_64),
120
                                    "T2_64");
121
#endif
122

  
123
    cpu_FT[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
124
                                   offsetof(CPUState, ft0), "FT0");
125
    cpu_FT[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
126
                                   offsetof(CPUState, ft1), "FT1");
127
    cpu_FT[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
128
                                   offsetof(CPUState, ft2), "FT2");
129

  
130
    cpu_AVRh[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
111
    cpu_T64[0] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t0_64),
112
                                        "T0_64");
113
    cpu_T64[1] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t1_64),
114
                                        "T1_64");
115
    cpu_T64[2] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t2_64),
116
                                        "T2_64");
117
#endif
118

  
119
    cpu_FT[0] = tcg_global_mem_new_i64(TCG_AREG0,
120
                                       offsetof(CPUState, ft0), "FT0");
121
    cpu_FT[1] = tcg_global_mem_new_i64(TCG_AREG0,
122
                                       offsetof(CPUState, ft1), "FT1");
123
    cpu_FT[2] = tcg_global_mem_new_i64(TCG_AREG0,
124
                                       offsetof(CPUState, ft2), "FT2");
125

  
126
    cpu_AVRh[0] = tcg_global_mem_new_i64(TCG_AREG0,
131 127
                                     offsetof(CPUState, avr0.u64[0]), "AVR0H");
132
    cpu_AVRl[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
128
    cpu_AVRl[0] = tcg_global_mem_new_i64(TCG_AREG0,
133 129
                                     offsetof(CPUState, avr0.u64[1]), "AVR0L");
134
    cpu_AVRh[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
130
    cpu_AVRh[1] = tcg_global_mem_new_i64(TCG_AREG0,
135 131
                                     offsetof(CPUState, avr1.u64[0]), "AVR1H");
136
    cpu_AVRl[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
132
    cpu_AVRl[1] = tcg_global_mem_new_i64(TCG_AREG0,
137 133
                                     offsetof(CPUState, avr1.u64[1]), "AVR1L");
138
    cpu_AVRh[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
134
    cpu_AVRh[2] = tcg_global_mem_new_i64(TCG_AREG0,
139 135
                                     offsetof(CPUState, avr2.u64[0]), "AVR2H");
140
    cpu_AVRl[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
136
    cpu_AVRl[2] = tcg_global_mem_new_i64(TCG_AREG0,
141 137
                                     offsetof(CPUState, avr2.u64[1]), "AVR2L");
142 138

  
143 139
    p = cpu_reg_names;
144 140

  
145 141
    for (i = 0; i < 8; i++) {
146 142
        sprintf(p, "crf%d", i);
147
        cpu_crf[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
148
                                        offsetof(CPUState, crf[i]), p);
143
        cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
144
                                            offsetof(CPUState, crf[i]), p);
149 145
        p += 5;
150 146
    }
151 147

  
152 148
    for (i = 0; i < 32; i++) {
153 149
        sprintf(p, "r%d", i);
154
        cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
150
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
155 151
                                        offsetof(CPUState, gpr[i]), p);
156 152
        p += (i < 10) ? 3 : 4;
157 153
#if !defined(TARGET_PPC64)
158 154
        sprintf(p, "r%dH", i);
159
        cpu_gprh[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
160
                                         offsetof(CPUState, gprh[i]), p);
155
        cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
156
                                             offsetof(CPUState, gprh[i]), p);
161 157
        p += (i < 10) ? 4 : 5;
162 158
#endif
163 159

  
164 160
        sprintf(p, "fp%d", i);
165
        cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
166
                                        offsetof(CPUState, fpr[i]), p);
161
        cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
162
                                            offsetof(CPUState, fpr[i]), p);
167 163
        p += (i < 10) ? 4 : 5;
168 164

  
169 165
        sprintf(p, "avr%dH", i);
170
        cpu_avrh[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
166
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
171 167
                                         offsetof(CPUState, avr[i].u64[0]), p);
172 168
        p += (i < 10) ? 6 : 7;
173 169

  
174 170
        sprintf(p, "avr%dL", i);
175
        cpu_avrl[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
171
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
176 172
                                         offsetof(CPUState, avr[i].u64[1]), p);
177 173
        p += (i < 10) ? 6 : 7;
178 174
    }
179 175

  
180
    cpu_nip = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
176
    cpu_nip = tcg_global_mem_new(TCG_AREG0,
181 177
                                 offsetof(CPUState, nip), "nip");
182 178

  
183
    cpu_ctr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
179
    cpu_ctr = tcg_global_mem_new(TCG_AREG0,
184 180
                                 offsetof(CPUState, ctr), "ctr");
185 181

  
186
    cpu_lr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
182
    cpu_lr = tcg_global_mem_new(TCG_AREG0,
187 183
                                offsetof(CPUState, lr), "lr");
188 184

  
189
    cpu_xer = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
185
    cpu_xer = tcg_global_mem_new(TCG_AREG0,
190 186
                                 offsetof(CPUState, xer), "xer");
191 187

  
192
    cpu_fpscr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
193
                                   offsetof(CPUState, fpscr), "fpscr");
188
    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
189
                                       offsetof(CPUState, fpscr), "fpscr");
194 190

  
195 191
    /* register helpers */
196
#undef DEF_HELPER
197
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
192
#define GEN_HELPER 2
198 193
#include "helper.h"
199 194

  
200 195
    done_init = 1;
......
258 253
        *gen_fprf_ptr++ = gen_opc_ptr;
259 254
#endif
260 255
        gen_op_compute_fprf(1);
261
        if (unlikely(set_rc))
262
            tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
256
        if (unlikely(set_rc)) {
257
            tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_T[0]);
258
            tcg_gen_andi_i32(cpu_crf[1], cpu_crf[1], 0xf);
259
        }
263 260
        gen_op_float_check_status();
264 261
    } else if (unlikely(set_rc)) {
265 262
        /* We always need to compute fpcc */
266 263
        gen_op_compute_fprf(0);
267
        tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
264
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_T[0]);
265
        tcg_gen_andi_i32(cpu_crf[1], cpu_crf[1], 0xf);
268 266
        if (set_fprf)
269 267
            gen_op_float_check_status();
270 268
    }
......
751 749
static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
752 750
{
753 751
    TCGv t0, t1;
754
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
755
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
752
    t0 = tcg_temp_local_new();
753
    t1 = tcg_temp_local_new();
756 754
    if (s) {
757 755
        tcg_gen_ext32s_tl(t0, arg0);
758 756
        tcg_gen_ext32s_tl(t1, arg1);
......
841 839
    int l1, l2;
842 840
    uint32_t bi = rC(ctx->opcode);
843 841
    uint32_t mask;
844
    TCGv t0;
842
    TCGv_i32 t0;
845 843

  
846 844
    l1 = gen_new_label();
847 845
    l2 = gen_new_label();
848 846

  
849 847
    mask = 1 << (3 - (bi & 0x03));
850
    t0 = tcg_temp_new(TCG_TYPE_I32);
848
    t0 = tcg_temp_new_i32();
851 849
    tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
852 850
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
853 851
    if (rA(ctx->opcode) == 0)
......
858 856
    gen_set_label(l1);
859 857
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
860 858
    gen_set_label(l2);
861
    tcg_temp_free(t0);
859
    tcg_temp_free_i32(t0);
862 860
}
863 861

  
864 862
/***                           Integer arithmetic                          ***/
......
871 869
    l1 = gen_new_label();
872 870
    /* Start with XER OV disabled, the most likely case */
873 871
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
874
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
872
    t0 = tcg_temp_local_new();
875 873
    tcg_gen_xor_tl(t0, arg0, arg1);
876 874
#if defined(TARGET_PPC64)
877 875
    if (!ctx->sf_mode)
......
902 900
#if defined(TARGET_PPC64)
903 901
    if (!(ctx->sf_mode)) {
904 902
        TCGv t0, t1;
905
        t0 = tcg_temp_new(TCG_TYPE_TL);
906
        t1 = tcg_temp_new(TCG_TYPE_TL);
903
        t0 = tcg_temp_new();
904
        t1 = tcg_temp_new();
907 905

  
908 906
        tcg_gen_ext32u_tl(t0, arg1);
909 907
        tcg_gen_ext32u_tl(t1, arg2);
......
936 934
    TCGv t0, t1;
937 935

  
938 936
    if ((!compute_ca && !compute_ov) ||
939
        (GET_TCGV(ret) != GET_TCGV(arg1) && GET_TCGV(ret) != GET_TCGV(arg2)))  {
937
        (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2)))  {
940 938
        t0 = ret;
941 939
    } else {
942
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
940
        t0 = tcg_temp_local_new();
943 941
    }
944 942

  
945 943
    if (add_ca) {
946
        t1 = tcg_temp_local_new(TCG_TYPE_TL);
944
        t1 = tcg_temp_local_new();
947 945
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
948 946
        tcg_gen_shri_tl(t1, t1, XER_CA);
949 947
    }
......
976 974
    if (unlikely(Rc(ctx->opcode) != 0))
977 975
        gen_set_Rc0(ctx, t0);
978 976

  
979
    if (GET_TCGV(t0) != GET_TCGV(ret)) {
977
    if (!TCGV_EQUAL(t0, ret)) {
980 978
        tcg_gen_mov_tl(ret, t0);
981 979
        tcg_temp_free(t0);
982 980
    }
......
1038 1036
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1039 1037

  
1040 1038
    if (likely(simm != 0)) {
1041
        TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
1039
        TCGv t0 = tcg_temp_local_new();
1042 1040
        tcg_gen_addi_tl(t0, arg1, simm);
1043 1041
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
1044 1042
        tcg_gen_mov_tl(ret, t0);
......
1076 1074
{
1077 1075
    int l1 = gen_new_label();
1078 1076
    int l2 = gen_new_label();
1079
    TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32);
1080
    TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32);
1077
    TCGv_i32 t0 = tcg_temp_local_new_i32();
1078
    TCGv_i32 t1 = tcg_temp_local_new_i32();
1081 1079

  
1082 1080
    tcg_gen_trunc_tl_i32(t0, arg1);
1083 1081
    tcg_gen_trunc_tl_i32(t1, arg2);
......
1106 1104
    }
1107 1105
    gen_set_label(l2);
1108 1106
    tcg_gen_extu_i32_tl(ret, t0);
1109
    tcg_temp_free(t0);
1110
    tcg_temp_free(t1);
1107
    tcg_temp_free_i32(t0);
1108
    tcg_temp_free_i32(t1);
1111 1109
    if (unlikely(Rc(ctx->opcode) != 0))
1112 1110
        gen_set_Rc0(ctx, ret);
1113 1111
}
......
1177 1175
/* mulhw  mulhw. */
1178 1176
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
1179 1177
{
1180
    TCGv t0, t1;
1178
    TCGv_i64 t0, t1;
1181 1179

  
1182
    t0 = tcg_temp_new(TCG_TYPE_I64);
1183
    t1 = tcg_temp_new(TCG_TYPE_I64);
1180
    t0 = tcg_temp_new_i64();
1181
    t1 = tcg_temp_new_i64();
1184 1182
#if defined(TARGET_PPC64)
1185 1183
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1186 1184
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
......
1193 1191
    tcg_gen_shri_i64(t0, t0, 32);
1194 1192
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1195 1193
#endif
1196
    tcg_temp_free(t0);
1197
    tcg_temp_free(t1);
1194
    tcg_temp_free_i64(t0);
1195
    tcg_temp_free_i64(t1);
1198 1196
    if (unlikely(Rc(ctx->opcode) != 0))
1199 1197
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1200 1198
}
1201 1199
/* mulhwu  mulhwu.  */
1202 1200
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
1203 1201
{
1204
    TCGv t0, t1;
1202
    TCGv_i64 t0, t1;
1205 1203

  
1206
    t0 = tcg_temp_new(TCG_TYPE_I64);
1207
    t1 = tcg_temp_new(TCG_TYPE_I64);
1204
    t0 = tcg_temp_new_i64();
1205
    t1 = tcg_temp_new_i64();
1208 1206
#if defined(TARGET_PPC64)
1209 1207
    tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1210 1208
    tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
......
1217 1215
    tcg_gen_shri_i64(t0, t0, 32);
1218 1216
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1219 1217
#endif
1220
    tcg_temp_free(t0);
1221
    tcg_temp_free(t1);
1218
    tcg_temp_free_i64(t0);
1219
    tcg_temp_free_i64(t1);
1222 1220
    if (unlikely(Rc(ctx->opcode) != 0))
1223 1221
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1224 1222
}
......
1235 1233
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
1236 1234
{
1237 1235
    int l1;
1238
    TCGv t0, t1;
1236
    TCGv_i64 t0, t1;
1239 1237

  
1240
    t0 = tcg_temp_new(TCG_TYPE_I64);
1241
    t1 = tcg_temp_new(TCG_TYPE_I64);
1238
    t0 = tcg_temp_new_i64();
1239
    t1 = tcg_temp_new_i64();
1242 1240
    l1 = gen_new_label();
1243 1241
    /* Start with XER OV disabled, the most likely case */
1244 1242
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
......
1260 1258
#endif
1261 1259
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1262 1260
    gen_set_label(l1);
1263
    tcg_temp_free(t0);
1264
    tcg_temp_free(t1);
1261
    tcg_temp_free_i64(t0);
1262
    tcg_temp_free_i64(t1);
1265 1263
    if (unlikely(Rc(ctx->opcode) != 0))
1266 1264
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1267 1265
}
......
1275 1273
#define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
1276 1274
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)                      \
1277 1275
{                                                                             \
1278
    tcg_gen_helper_1_2(helper_##name, cpu_gpr[rD(ctx->opcode)],               \
1276
    gen_helper_##name (cpu_gpr[rD(ctx->opcode)],                              \
1279 1277
                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);   \
1280 1278
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
1281 1279
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
......
1301 1299
{
1302 1300
    int l1 = gen_new_label();
1303 1301
    int l2 = gen_new_label();
1304
    TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
1302
    TCGv t0 = tcg_temp_local_new();
1305 1303
#if defined(TARGET_PPC64)
1306 1304
    if (ctx->sf_mode) {
1307 1305
        tcg_gen_mov_tl(t0, arg1);
......
1343 1341
    TCGv t0, t1;
1344 1342

  
1345 1343
    if ((!compute_ca && !compute_ov) ||
1346
        (GET_TCGV(ret) != GET_TCGV(arg1) && GET_TCGV(ret) != GET_TCGV(arg2)))  {
1344
        (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2)))  {
1347 1345
        t0 = ret;
1348 1346
    } else {
1349
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
1347
        t0 = tcg_temp_local_new();
1350 1348
    }
1351 1349

  
1352 1350
    if (add_ca) {
1353
        t1 = tcg_temp_local_new(TCG_TYPE_TL);
1351
        t1 = tcg_temp_local_new();
1354 1352
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1355 1353
        tcg_gen_shri_tl(t1, t1, XER_CA);
1356 1354
    }
......
1386 1384
    if (unlikely(Rc(ctx->opcode) != 0))
1387 1385
        gen_set_Rc0(ctx, t0);
1388 1386

  
1389
    if (GET_TCGV(t0) != GET_TCGV(ret)) {
1387
    if (!TCGV_EQUAL(t0, ret)) {
1390 1388
        tcg_gen_mov_tl(ret, t0);
1391 1389
        tcg_temp_free(t0);
1392 1390
    }
......
1430 1428
{
1431 1429
    /* Start with XER CA and OV disabled, the most likely case */
1432 1430
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1433
    TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
1431
    TCGv t0 = tcg_temp_local_new();
1434 1432
    TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1435 1433
    tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1436 1434
    gen_op_arith_compute_ca(ctx, t0, t1, 1);
......
1476 1474
/* cntlzw */
1477 1475
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
1478 1476
{
1479
    tcg_gen_helper_1_1(helper_cntlzw, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1477
    gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1480 1478
    if (unlikely(Rc(ctx->opcode) != 0))
1481 1479
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1482 1480
}
......
1556 1554
            break;
1557 1555
        }
1558 1556
        if (prio) {
1559
            TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
1557
            TCGv t0 = tcg_temp_new();
1560 1558
            tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, spr[SPR_PPR]));
1561 1559
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1562 1560
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
......
1629 1627
{
1630 1628
#if defined(TARGET_PPC64)
1631 1629
    if (ctx->sf_mode)
1632
        tcg_gen_helper_1_1(helper_popcntb_64, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1630
        gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1633 1631
    else
1634 1632
#endif
1635
        tcg_gen_helper_1_1(helper_popcntb, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1633
        gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1636 1634
}
1637 1635

  
1638 1636
#if defined(TARGET_PPC64)
......
1641 1639
/* cntlzd */
1642 1640
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
1643 1641
{
1644
    tcg_gen_helper_1_1(helper_cntlzd, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1642
    gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1645 1643
    if (unlikely(Rc(ctx->opcode) != 0))
1646 1644
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1647 1645
}
......
1659 1657
    if (likely(sh == 0 && mb == 0 && me == 31)) {
1660 1658
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1661 1659
    } else {
1662
        TCGv t0, t1;
1663 1660
        target_ulong mask;
1664

  
1665
        t0 = tcg_temp_new(TCG_TYPE_TL);
1661
        TCGv t1;
1662
        TCGv t0 = tcg_temp_new();
1666 1663
#if defined(TARGET_PPC64)
1667
        t1 = tcg_temp_new(TCG_TYPE_I32);
1668
        tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1669
        tcg_gen_rotli_i32(t1, t1, sh);
1670
        tcg_gen_extu_i32_i64(t0, t1);
1671
        tcg_temp_free(t1);
1664
        TCGv_i32 t2 = tcg_temp_new_i32();
1665
        tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1666
        tcg_gen_rotli_i32(t2, t2, sh);
1667
        tcg_gen_extu_i32_i64(t0, t2);
1668
        tcg_temp_free_i32(t2);
1672 1669
#else
1673 1670
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1674 1671
#endif
......
1677 1674
        me += 32;
1678 1675
#endif
1679 1676
        mask = MASK(mb, me);
1680
        t1 = tcg_temp_new(TCG_TYPE_TL);
1677
        t1 = tcg_temp_new();
1681 1678
        tcg_gen_andi_tl(t0, t0, mask);
1682 1679
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1683 1680
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
......
1700 1697
        if (likely(sh == 0)) {
1701 1698
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1702 1699
        } else {
1703
            TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
1700
            TCGv t0 = tcg_temp_new();
1704 1701
            tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1705 1702
            tcg_gen_shli_tl(t0, t0, sh);
1706 1703
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1707 1704
            tcg_temp_free(t0);
1708 1705
        }
1709 1706
    } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1710
        TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
1707
        TCGv t0 = tcg_temp_new();
1711 1708
        tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1712 1709
        tcg_gen_shri_tl(t0, t0, mb);
1713 1710
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1714 1711
        tcg_temp_free(t0);
1715 1712
    } else {
1716
        TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
1713
        TCGv t0 = tcg_temp_new();
1717 1714
#if defined(TARGET_PPC64)
1718
        TCGv t1 = tcg_temp_new(TCG_TYPE_I32);
1715
        TCGv_i32 t1 = tcg_temp_new_i32();
1719 1716
        tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1720 1717
        tcg_gen_rotli_i32(t1, t1, sh);
1721 1718
        tcg_gen_extu_i32_i64(t0, t1);
1722
        tcg_temp_free(t1);
1719
        tcg_temp_free_i32(t1);
1723 1720
#else
1724 1721
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1725 1722
#endif
......
1739 1736
    uint32_t mb, me;
1740 1737
    TCGv t0;
1741 1738
#if defined(TARGET_PPC64)
1742
    TCGv t1, t2;
1739
    TCGv_i32 t1, t2;
1743 1740
#endif
1744 1741

  
1745 1742
    mb = MB(ctx->opcode);
1746 1743
    me = ME(ctx->opcode);
1747
    t0 = tcg_temp_new(TCG_TYPE_TL);
1744
    t0 = tcg_temp_new();
1748 1745
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1749 1746
#if defined(TARGET_PPC64)
1750
    t1 = tcg_temp_new(TCG_TYPE_I32);
1751
    t2 = tcg_temp_new(TCG_TYPE_I32);
1747
    t1 = tcg_temp_new_i32();
1748
    t2 = tcg_temp_new_i32();
1752 1749
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1753 1750
    tcg_gen_trunc_i64_i32(t2, t0);
1754 1751
    tcg_gen_rotl_i32(t1, t1, t2);
1755 1752
    tcg_gen_extu_i32_i64(t0, t1);
1756
    tcg_temp_free(t1);
1757
    tcg_temp_free(t2);
1753
    tcg_temp_free_i32(t1);
1754
    tcg_temp_free_i32(t2);
1758 1755
#else
1759 1756
    tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1760 1757
#endif
......
1812 1809
    } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1813 1810
        tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1814 1811
    } else {
1815
        TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
1812
        TCGv t0 = tcg_temp_new();
1816 1813
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1817 1814
        if (likely(mb == 0 && me == 63)) {
1818 1815
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
......
1862 1859

  
1863 1860
    mb = MB(ctx->opcode);
1864 1861
    me = ME(ctx->opcode);
1865
    t0 = tcg_temp_new(TCG_TYPE_TL);
1862
    t0 = tcg_temp_new();
1866 1863
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1867 1864
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1868 1865
    if (unlikely(mb != 0 || me != 63)) {
......
1907 1904
        TCGv t0, t1;
1908 1905
        target_ulong mask;
1909 1906

  
1910
        t0 = tcg_temp_new(TCG_TYPE_TL);
1907
        t0 = tcg_temp_new();
1911 1908
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1912
        t1 = tcg_temp_new(TCG_TYPE_TL);
1909
        t1 = tcg_temp_new();
1913 1910
        mask = MASK(mb, me);
1914 1911
        tcg_gen_andi_tl(t0, t0, mask);
1915 1912
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
......
1932 1929
    l1 = gen_new_label();
1933 1930
    l2 = gen_new_label();
1934 1931

  
1935
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
1932
    t0 = tcg_temp_local_new();
1936 1933
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1937 1934
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
1938 1935
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
......
1948 1945
/* sraw & sraw. */
1949 1946
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
1950 1947
{
1951
    tcg_gen_helper_1_2(helper_sraw, cpu_gpr[rA(ctx->opcode)],
1952
                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1948
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1949
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1953 1950
    if (unlikely(Rc(ctx->opcode) != 0))
1954 1951
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1955 1952
}
......
1962 1959
        TCGv t0;
1963 1960
        l1 = gen_new_label();
1964 1961
        l2 = gen_new_label();
1965
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
1962
        t0 = tcg_temp_local_new();
1966 1963
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1967 1964
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1968 1965
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
......
1990 1987
    l1 = gen_new_label();
1991 1988
    l2 = gen_new_label();
1992 1989

  
1993
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
1990
    t0 = tcg_temp_local_new();
1994 1991
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1995 1992
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
1996 1993
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1997 1994
    tcg_gen_br(l2);
1998 1995
    gen_set_label(l1);
1999
    t1 = tcg_temp_new(TCG_TYPE_TL);
1996
    t1 = tcg_temp_new();
2000 1997
    tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
2001 1998
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
2002 1999
    tcg_temp_free(t1);
......
2014 2011
    l1 = gen_new_label();
2015 2012
    l2 = gen_new_label();
2016 2013

  
2017
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
2014
    t0 = tcg_temp_local_new();
2018 2015
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2019 2016
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2020 2017
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
......
2029 2026
/* srad & srad. */
2030 2027
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
2031 2028
{
2032
    tcg_gen_helper_1_2(helper_srad, cpu_gpr[rA(ctx->opcode)],
2033
                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2029
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
2030
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2034 2031
    if (unlikely(Rc(ctx->opcode) != 0))
2035 2032
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2036 2033
}
......
2043 2040
        TCGv t0;
2044 2041
        l1 = gen_new_label();
2045 2042
        l2 = gen_new_label();
2046
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
2043
        t0 = tcg_temp_local_new();
2047 2044
        tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
2048 2045
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
2049 2046
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
......
2077 2074
    l1 = gen_new_label();
2078 2075
    l2 = gen_new_label();
2079 2076

  
2080
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
2077
    t0 = tcg_temp_local_new();
2081 2078
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2082 2079
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2083 2080
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
......
2288 2285
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
2289 2286
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
2290 2287
    gen_reset_fpstatus();
2291
    tcg_gen_helper_1_0(helper_fcmpo, cpu_crf[crfD(ctx->opcode)]);
2288
    gen_helper_fcmpo(cpu_crf[crfD(ctx->opcode)]);
2292 2289
    gen_op_float_check_status();
2293 2290
}
2294 2291

  
......
2302 2299
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
2303 2300
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
2304 2301
    gen_reset_fpstatus();
2305
    tcg_gen_helper_1_0(helper_fcmpu, cpu_crf[crfD(ctx->opcode)]);
2302
    gen_helper_fcmpu(cpu_crf[crfD(ctx->opcode)]);
2306 2303
    gen_op_float_check_status();
2307 2304
}
2308 2305

  
......
2528 2525
    if (likely(flags & 2))                                                       \
2529 2526
        tcg_gen_qemu_ld##width(t0, t1, flags >> 2);                              \
2530 2527
    else {                                                                       \
2531
        TCGv addr = tcg_temp_new(TCG_TYPE_TL);                                   \
2528
        TCGv addr = tcg_temp_new();                                   \
2532 2529
        tcg_gen_ext32u_tl(addr, t1);                                             \
2533 2530
        tcg_gen_qemu_ld##width(t0, addr, flags >> 2);                            \
2534 2531
        tcg_temp_free(addr);                                                     \
......
2548 2545
    if (likely(flags & 2))                                                       \
2549 2546
        tcg_gen_qemu_st##width(t0, t1, flags >> 2);                              \
2550 2547
    else {                                                                       \
2551
        TCGv addr = tcg_temp_new(TCG_TYPE_TL);                                   \
2548
        TCGv addr = tcg_temp_new();                                   \
2552 2549
        tcg_gen_ext32u_tl(addr, t1);                                             \
2553 2550
        tcg_gen_qemu_st##width(t0, addr, flags >> 2);                            \
2554 2551
        tcg_temp_free(addr);                                                     \
......
2572 2569
/* Load a zero-extended 16-bit value; in byte-reversed mode (flags bit 0)
 * swap the halfword through a 32-bit temporary. */
static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
{
    /* Both paths start with the same zero-extending load. */
    gen_qemu_ld16u_ppc64(arg0, arg1, flags);
    if (unlikely(flags & 1)) {
        TCGv_i32 swap = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(swap, arg0);
        tcg_gen_bswap16_i32(swap, swap);
        tcg_gen_extu_i32_tl(arg0, swap);
        tcg_temp_free_i32(swap);
    }
}
......
2586 2583
/* Load a sign-extended 16-bit value.  In byte-reversed mode the value is
 * loaded unsigned, the halfword is swapped, then sign-extended. */
static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 swap = tcg_temp_new_i32();
        gen_qemu_ld16u_ppc64(arg0, arg1, flags);
        tcg_gen_trunc_tl_i32(swap, arg0);
        tcg_gen_bswap16_i32(swap, swap);
        tcg_gen_extu_i32_tl(arg0, swap);
        tcg_gen_ext16s_tl(arg0, arg0);
        tcg_temp_free_i32(swap);
    } else {
        gen_qemu_ld16s_ppc64(arg0, arg1, flags);
    }
}
......
2601 2598
/* Load a zero-extended 32-bit value; in byte-reversed mode swap the word
 * through a 32-bit temporary. */
static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
{
    /* Both paths start with the same zero-extending load. */
    gen_qemu_ld32u_ppc64(arg0, arg1, flags);
    if (unlikely(flags & 1)) {
        TCGv_i32 swap = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(swap, arg0);
        tcg_gen_bswap_i32(swap, swap);
        tcg_gen_extu_i32_tl(arg0, swap);
        tcg_temp_free_i32(swap);
    }
}
......
2615 2612
/* Load a sign-extended 32-bit value.  In byte-reversed mode the word is
 * loaded unsigned, swapped, then sign-extended from bit 31. */
static always_inline void gen_qemu_ld32s(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 swap = tcg_temp_new_i32();
        gen_qemu_ld32u_ppc64(arg0, arg1, flags);
        tcg_gen_trunc_tl_i32(swap, arg0);
        tcg_gen_bswap_i32(swap, swap);
        tcg_gen_ext_i32_tl(arg0, swap);
        tcg_temp_free_i32(swap);
    } else {
        gen_qemu_ld32s_ppc64(arg0, arg1, flags);
    }
}
......
2641 2638
/* Store a 16-bit value; in byte-reversed mode swap the low halfword in a
 * 32-bit temp, then widen back to target width for the store helper. */
static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 half = tcg_temp_new_i32();
        TCGv_i64 wide;
        tcg_gen_trunc_tl_i32(half, arg0);
        tcg_gen_ext16u_i32(half, half);
        tcg_gen_bswap16_i32(half, half);
        wide = tcg_temp_new_i64();
        tcg_gen_extu_i32_tl(wide, half);
        tcg_temp_free_i32(half);
        gen_qemu_st16_ppc64(wide, arg1, flags);
        tcg_temp_free_i64(wide);
    } else {
        gen_qemu_st16_ppc64(arg0, arg1, flags);
    }
}
......
2658 2656
/* Store a 32-bit value; in byte-reversed mode swap the word in a 32-bit
 * temp, then widen back to target width for the store helper. */
static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 word = tcg_temp_new_i32();
        TCGv_i64 wide;
        tcg_gen_trunc_tl_i32(word, arg0);
        tcg_gen_bswap_i32(word, word);
        wide = tcg_temp_new_i64();
        tcg_gen_extu_i32_tl(wide, word);
        tcg_temp_free_i32(word);
        gen_qemu_st32_ppc64(wide, arg1, flags);
        tcg_temp_free_i64(wide);
    } else {
        gen_qemu_st32_ppc64(arg0, arg1, flags);
    }
}
......
2674 2673
/* Store a 64-bit value, byte-swapping it first in reversed mode. */
static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i64 swapped = tcg_temp_new_i64();
        tcg_gen_bswap_i64(swapped, arg0);
        gen_qemu_st64_ppc64(swapped, arg1, flags);
        tcg_temp_free_i64(swapped);
    } else {
        gen_qemu_st64_ppc64(arg0, arg1, flags);
    }
}
......
2695 2694
GEN_QEMU_LD_PPC32(16s)
2696 2695
GEN_QEMU_LD_PPC32(32u)
2697 2696
GEN_QEMU_LD_PPC32(32s)
2698
GEN_QEMU_LD_PPC32(64)
2699 2697

  
2700 2698
#define GEN_QEMU_ST_PPC32(width)                                                 \
2701 2699
static always_inline void gen_qemu_st##width##_ppc32(TCGv arg0, TCGv arg1, int flags)\
......
2705 2703
GEN_QEMU_ST_PPC32(8)
2706 2704
GEN_QEMU_ST_PPC32(16)
2707 2705
GEN_QEMU_ST_PPC32(32)
2708
GEN_QEMU_ST_PPC32(64)
2709 2706

  
2710 2707
static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2711 2708
{
......
2741 2738
        tcg_gen_bswap_i32(arg0, arg0);
2742 2739
}
2743 2740

  
2744
static always_inline void gen_qemu_ld64(TCGv arg0, TCGv arg1, int flags)
2745
{
2746
    gen_qemu_ld64_ppc32(arg0, arg1, flags);
2747
    if (unlikely(flags & 1))
2748
        tcg_gen_bswap_i64(arg0, arg0);
2749
}
2750

  
2751 2741
static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2752 2742
{
2753 2743
    gen_qemu_st8_ppc32(arg0, arg1, flags);
......
2756 2746
/* 32-bit target: store a 16-bit value, byte-reversing the halfword when
 * flags bit 0 is set. */
static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 rev = tcg_temp_new_i32();
        tcg_gen_ext16u_i32(rev, arg0);
        tcg_gen_bswap16_i32(rev, rev);
        gen_qemu_st16_ppc32(rev, arg1, flags);
        tcg_temp_free_i32(rev);
    } else {
        gen_qemu_st16_ppc32(arg0, arg1, flags);
    }
}
......
2768 2758
/* 32-bit target: store a 32-bit value, byte-reversing it when flags bit 0
 * is set. */
static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 rev = tcg_temp_new_i32();
        tcg_gen_bswap_i32(rev, arg0);
        gen_qemu_st32_ppc32(rev, arg1, flags);
        tcg_temp_free_i32(rev);
    } else {
        gen_qemu_st32_ppc32(arg0, arg1, flags);
    }
}
2778 2768

  
2779
static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
2780
{
2781
    if (unlikely(flags & 1)) {
2782
        TCGv temp = tcg_temp_new(TCG_TYPE_I64);
2783
        tcg_gen_bswap_i64(temp, arg0);
2784
        gen_qemu_st64_ppc32(temp, arg1, flags);
2785
        tcg_temp_free(temp);
2786
    } else
2787
        gen_qemu_st64_ppc32(arg0, arg1, flags);
2788
}
2789

  
2790 2769
#endif
2791 2770

  
2792 2771
#define GEN_LD(width, opc, type)                                              \
2793 2772
GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type)                      \
2794 2773
{                                                                             \
2795
    TCGv EA = tcg_temp_new(TCG_TYPE_TL);                                      \
2774
    TCGv EA = tcg_temp_new();                                      \
2796 2775
    gen_addr_imm_index(EA, ctx, 0);                                           \
2797 2776
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
2798 2777
    tcg_temp_free(EA);                                                        \
......
2807 2786
        GEN_EXCP_INVAL(ctx);                                                  \
2808 2787
        return;                                                               \
2809 2788
    }                                                                         \
2810
    EA = tcg_temp_new(TCG_TYPE_TL);                                           \
2789
    EA = tcg_temp_new();                                           \
2811 2790
    if (type == PPC_64B)                                                      \
2812 2791
        gen_addr_imm_index(EA, ctx, 0x03);                                    \
2813 2792
    else                                                                      \
......
2826 2805
        GEN_EXCP_INVAL(ctx);                                                  \
2827 2806
        return;                                                               \
2828 2807
    }                                                                         \
2829
    EA = tcg_temp_new(TCG_TYPE_TL);                                           \
2808
    EA = tcg_temp_new();                                           \
2830 2809
    gen_addr_reg_index(EA, ctx);                                              \
2831 2810
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
2832 2811
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
......
2836 2815
#define GEN_LDX(width, opc2, opc3, type)                                      \
2837 2816
GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type)                  \
2838 2817
{                                                                             \
2839
    TCGv EA = tcg_temp_new(TCG_TYPE_TL);                                      \
2818
    TCGv EA = tcg_temp_new();                                      \
2840 2819
    gen_addr_reg_index(EA, ctx);                                              \
2841 2820
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
2842 2821
    tcg_temp_free(EA);                                                        \
......
2875 2854
            return;
2876 2855
        }
2877 2856
    }
2878
    EA = tcg_temp_new(TCG_TYPE_TL);
2857
    EA = tcg_temp_new();
2879 2858
    gen_addr_imm_index(EA, ctx, 0x03);
2880 2859
    if (ctx->opcode & 0x02) {
2881 2860
        /* lwa (lwau is undefined) */
......
2913 2892
        GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2914 2893
        return;
2915 2894
    }
2916
    EA = tcg_temp_new(TCG_TYPE_TL);
2895
    EA = tcg_temp_new();
2917 2896
    gen_addr_imm_index(EA, ctx, 0x0F);
2918 2897
    gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
2919 2898
    tcg_gen_addi_tl(EA, EA, 8);
......
2927 2906
#define GEN_ST(width, opc, type)                                              \
2928 2907
GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type)                     \
2929 2908
{                                                                             \
2930
    TCGv EA = tcg_temp_new(TCG_TYPE_TL);                                      \
2909
    TCGv EA = tcg_temp_new();                                      \
2931 2910
    gen_addr_imm_index(EA, ctx, 0);                                           \
2932 2911
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);       \
2933 2912
    tcg_temp_free(EA);                                                        \
......
2941 2920
        GEN_EXCP_INVAL(ctx);                                                  \
2942 2921
        return;                                                               \
2943 2922
    }                                                                         \
2944
    EA = tcg_temp_new(TCG_TYPE_TL);                                           \
2923
    EA = tcg_temp_new();                                           \
2945 2924
    if (type == PPC_64B)                                                      \
2946 2925
        gen_addr_imm_index(EA, ctx, 0x03);                                    \
2947 2926
    else                                                                      \
......
2959 2938
        GEN_EXCP_INVAL(ctx);                                                  \
2960 2939
        return;                                                               \
2961 2940
    }                                                                         \
2962
    EA = tcg_temp_new(TCG_TYPE_TL);                                           \
2941
    EA = tcg_temp_new();                                           \
2963 2942
    gen_addr_reg_index(EA, ctx);                                              \
2964 2943
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);           \
2965 2944
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
......
2969 2948
#define GEN_STX(width, opc2, opc3, type)                                      \
2970 2949
GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type)                 \
2971 2950
{                                                                             \
2972
    TCGv EA = tcg_temp_new(TCG_TYPE_TL);                                      \
2951
    TCGv EA = tcg_temp_new();                                      \
2973 2952
    gen_addr_reg_index(EA, ctx);                                              \
2974 2953
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);           \
2975 2954
    tcg_temp_free(EA);                                                        \
......
3014 2993
            GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
3015 2994
            return;
3016 2995
        }
3017
        EA = tcg_temp_new(TCG_TYPE_TL);
2996
        EA = tcg_temp_new();
3018 2997
        gen_addr_imm_index(EA, ctx, 0x03);
3019 2998
        gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3020 2999
        tcg_gen_addi_tl(EA, EA, 8);
......
3029 3008
                return;
3030 3009
            }
3031 3010
        }
3032
        EA = tcg_temp_new(TCG_TYPE_TL);
3011
        EA = tcg_temp_new();
3033 3012
        gen_addr_imm_index(EA, ctx, 0x03);
3034 3013
        gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3035 3014
        if (Rc(ctx->opcode))
......
3042 3021
/* lhbrx */
3043 3022
/* lhbrx: load halfword and unconditionally byte-reverse it. */
void always_inline gen_qemu_ld16ur(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 half;
    gen_qemu_ld16u(t0, t1, flags);
    half = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(half, t0);
    tcg_gen_bswap16_i32(half, half);
    tcg_gen_extu_i32_tl(t0, half);
    tcg_temp_free_i32(half);
}
3051 3031
GEN_LDX(16ur, 0x16, 0x18, PPC_INTEGER);
3052 3032

  
3053 3033
/* lwbrx */
3054 3034
/* lwbrx: load word and unconditionally byte-reverse it. */
void always_inline gen_qemu_ld32ur(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 word;
    gen_qemu_ld32u(t0, t1, flags);
    word = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(word, t0);
    tcg_gen_bswap_i32(word, word);
    tcg_gen_extu_i32_tl(t0, word);
    tcg_temp_free_i32(word);
}
3062 3043
GEN_LDX(32ur, 0x16, 0x10, PPC_INTEGER);
3063 3044

  
3064 3045
/* sthbrx */
3065 3046
/* sthbrx: byte-reverse the low halfword of t0 and store it. */
void always_inline gen_qemu_st16r(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 half = tcg_temp_new_i32();
    TCGv swapped = tcg_temp_new();
    /* Truncate to 32 bits, keep the low halfword, reverse its bytes. */
    tcg_gen_trunc_tl_i32(half, t0);
    tcg_gen_ext16u_i32(half, half);
    tcg_gen_bswap16_i32(half, half);
    /* Widen back to target width for the generic store helper. */
    tcg_gen_extu_i32_tl(swapped, half);
    tcg_temp_free_i32(half);
    gen_qemu_st16(swapped, t1, flags);
    tcg_temp_free(swapped);
}
3074 3058
GEN_STX(16r, 0x16, 0x1C, PPC_INTEGER);
3075 3059

  
3076 3060
/* stwbrx */
3077 3061
/* stwbrx: byte-reverse the low word of t0 and store it.
 * BUG FIX: the previous code called gen_qemu_st16() (copy-paste from the
 * sthbrx helper above), which stored only 16 of the 32 bits.  stwbrx must
 * perform a 32-bit store, so call gen_qemu_st32(). */
void always_inline gen_qemu_st32r(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 temp = tcg_temp_new_i32();
    TCGv t2 = tcg_temp_new();
    /* Truncate to 32 bits and reverse the byte order. */
    tcg_gen_trunc_tl_i32(temp, t0);
    tcg_gen_bswap_i32(temp, temp);
    /* Widen back to target width for the generic store helper. */
    tcg_gen_extu_i32_tl(t2, temp);
    tcg_temp_free_i32(temp);
    gen_qemu_st32(t2, t1, flags);
    tcg_temp_free(t2);
}
3085 3072
GEN_STX(32r, 0x16, 0x14, PPC_INTEGER);
3086 3073

  
......
3530 3517

  
3531 3518
    ctx->exception = POWERPC_EXCP_BRANCH;
3532 3519
    if (type == BCOND_LR || type == BCOND_CTR) {
3533
        target = tcg_temp_local_new(TCG_TYPE_TL);
3520
        target = tcg_temp_local_new();
3534 3521
        if (type == BCOND_CTR)
3535 3522
            tcg_gen_mov_tl(target, cpu_ctr);
3536 3523
        else
......
3541 3528
    l1 = gen_new_label();
3542 3529
    if ((bo & 0x4) == 0) {
3543 3530
        /* Decrement and test CTR */
3544
        TCGv temp = tcg_temp_new(TCG_TYPE_TL);
3531
        TCGv temp = tcg_temp_new();
3545 3532
        if (unlikely(type == BCOND_CTR)) {
3546 3533
            GEN_EXCP_INVAL(ctx);
3547 3534
            return;
......
3558 3545
        } else {
3559 3546
            tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3560 3547
        }
3548
        tcg_temp_free(temp);
3561 3549
    }
3562 3550
    if ((bo & 0x10) == 0) {
3563 3551
        /* Test CR */
3564 3552
        uint32_t bi = BI(ctx->opcode);
3565 3553
        uint32_t mask = 1 << (3 - (bi & 0x03));
3566
        TCGv temp = tcg_temp_new(TCG_TYPE_I32);
3554
        TCGv_i32 temp = tcg_temp_new_i32();
3567 3555

  
3568 3556
        if (bo & 0x8) {
3569 3557
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
......
3572 3560
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3573 3561
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3574 3562
        }
3563
        tcg_temp_free_i32(temp);
3575 3564
    }
3576 3565
    if (type == BCOND_IM) {
3577 3566
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
......
3622 3611
{                                                                             \
3623 3612
    uint8_t bitmask;                                                          \
3624 3613
    int sh;                                                                   \
3625
    TCGv t0, t1;                                                              \
3614
    TCGv_i32 t0, t1;                                                          \
3626 3615
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
3627
    t0 = tcg_temp_new(TCG_TYPE_I32);                                          \
3616
    t0 = tcg_temp_new_i32();                                                  \
3628 3617
    if (sh > 0)                                                               \
3629 3618
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
3630 3619
    else if (sh < 0)                                                          \
3631 3620
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
3632 3621
    else                                                                      \
3633 3622
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
3634
    t1 = tcg_temp_new(TCG_TYPE_I32);                                          \
3623
    t1 = tcg_temp_new_i32();                                                  \
3635 3624
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
3636 3625
    if (sh > 0)                                                               \
3637 3626
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
......
3644 3633
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
3645 3634
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
3646 3635
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
3647
    tcg_temp_free(t0);                                                        \
3648
    tcg_temp_free(t1);                                                        \
3636
    tcg_temp_free_i32(t0);                                                    \
3637
    tcg_temp_free_i32(t1);                                                    \
3649 3638
}
3650 3639

  
3651 3640
/* crand */
......
3797 3786
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3798 3787
        }
3799 3788
    } else {
3800
        tcg_gen_helper_1_0(helper_load_cr, cpu_gpr[rD(ctx->opcode)]);
3789
        gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]);
3801 3790
    }
3802 3791
}
3803 3792

  
......
3892 3881

  
3893 3882
    crm = CRM(ctx->opcode);
3894 3883
    if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3884
        TCGv_i32 temp = tcg_temp_new_i32();
3895 3885
        crn = ffs(crm);
3896
        tcg_gen_shri_i32(cpu_crf[7 - crn], cpu_gpr[rS(ctx->opcode)], crn * 4);
3886
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3887
        tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3897 3888
        tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3889
        tcg_temp_free_i32(temp);
3898 3890
    } else {
3899
        TCGv t0 = tcg_const_tl(crm);
3900
        tcg_gen_helper_0_2(helper_store_cr, cpu_gpr[rS(ctx->opcode)], t0);
3901
        tcg_temp_free(t0);
3891
        TCGv_i32 temp = tcg_const_i32(crm);
3892
        gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp);
3893
        tcg_temp_free_i32(temp);
3902 3894
    }
3903 3895
}
3904 3896

  
......
4010 4002
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
4011 4003
{
4012 4004
    /* XXX: specification says this is treated as a load by the MMU */
4013
    TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
4005
    TCGv t0 = tcg_temp_new();
4014 4006
    gen_addr_reg_index(t0, ctx);
4015 4007
    gen_qemu_ld8u(t0, t0, ctx->mem_idx);
4016 4008
    tcg_temp_free(t0);
......
4027 4019
        GEN_EXCP_PRIVOPC(ctx);
4028 4020
        return;
4029 4021
    }
4030
    EA = tcg_temp_new(TCG_TYPE_TL);
4022
    EA = tcg_temp_new();
4031 4023
    gen_addr_reg_index(EA, ctx);
4032
    val = tcg_temp_new(TCG_TYPE_TL);
4024
    val = tcg_temp_new();
4033 4025
    /* XXX: specification says this should be treated as a store by the MMU */
4034 4026
    gen_qemu_ld8u(val, EA, ctx->mem_idx);
4035 4027
    gen_qemu_st8(val, EA, ctx->mem_idx);
......
4042 4034
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
4043 4035
{
4044 4036
    /* XXX: specification say this is treated as a load by the MMU */
4045
    TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
4037
    TCGv t0 = tcg_temp_new();
4046 4038
    gen_addr_reg_index(t0, ctx);
4047 4039
    gen_qemu_ld8u(t0, t0, ctx->mem_idx);
4048 4040
    tcg_temp_free(t0);
......
5188 5180
{
5189 5181
    TCGv t0, t1;
5190 5182

  
5191
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
5192
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
5183
    t0 = tcg_temp_local_new();
5184
    t1 = tcg_temp_local_new();
5193 5185

  
5194 5186
    switch (opc3 & 0x0D) {
5195 5187
    case 0x05:
......
5511 5503
        GEN_EXCP_PRIVOPC(ctx);
5512 5504
        return;
5513 5505
    }
5514
    EA = tcg_temp_new(TCG_TYPE_TL);
5506
    EA = tcg_temp_new();
5515 5507
    gen_addr_reg_index(EA, ctx);
5516
    val = tcg_temp_new(TCG_TYPE_TL);
5508
    val = tcg_temp_new();
5517 5509
    gen_qemu_ld32u(val, EA, ctx->mem_idx);
5518 5510
    tcg_temp_free(val);
5519 5511
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
......
5816 5808
    tcg_gen_or_tl(cpu_xer, cpu_xer, cpu_T[0]);
5817 5809
    if (Rc(ctx->opcode)) {
5818 5810
        gen_op_440_dlmzb_update_Rc();
5819
        tcg_gen_andi_i32(cpu_crf[0], cpu_T[0], 0xf);
5811
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_T[0]);
5812
        tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 0xf);
5820 5813
    }
5821 5814
}
5822 5815

  
......
5903 5896
/***                           SPE extension                               ***/
5904 5897
/* Register moves */
5905 5898

  
5906
static always_inline void gen_load_gpr64(TCGv t, int reg) {
5899
static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
5907 5900
#if defined(TARGET_PPC64)
5908 5901
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
5909 5902
#else
......
5911 5904
#endif
5912 5905
}
5913 5906

  
5914
static always_inline void gen_store_gpr64(int reg, TCGv t) {
5907
static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) {
5915 5908
#if defined(TARGET_PPC64)
5916 5909
    tcg_gen_mov_i64(cpu_gpr[reg], t);
5917 5910
#else
5911
    TCGv_i64 tmp = tcg_temp_new_i64();
5918 5912
    tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
5919
    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
5920 5913
    tcg_gen_shri_i64(tmp, t, 32);
5921 5914
    tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
5922
    tcg_temp_free(tmp);
5915
    tcg_temp_free_i64(tmp);
5923 5916
#endif
5924 5917
}
5925 5918

  
......
6068 6061
        GEN_EXCP_NO_AP(ctx);                                                  \
6069 6062
        return;                                                               \
6070 6063
    }                                                                         \
6071
    TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6072
    TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6073
    TCGv t2 = tcg_temp_local_new(TCG_TYPE_I64);                               \
6064
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
6065
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
6066
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
6074 6067
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
6075 6068
    tcg_opi(t0, t0, rB(ctx->opcode));                                         \
6076 6069
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                       \
6077 6070
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
6078
    tcg_temp_free(t2);                                                        \
6071
    tcg_temp_free_i64(t2);                                                    \
6079 6072
    tcg_opi(t1, t1, rB(ctx->opcode));                                         \
6080 6073
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
6081
    tcg_temp_free(t0);                                                        \
6082
    tcg_temp_free(t1);                                                        \
6074
    tcg_temp_free_i32(t0);                                                    \
6075
    tcg_temp_free_i32(t1);                                                    \
6083 6076
}
6084 6077
#else
6085 6078
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                               \
......
6109 6102
        GEN_EXCP_NO_AP(ctx);                                                  \
6110 6103
        return;                                                               \
6111 6104
    }                                                                         \
6112
    TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6113
    TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6114
    TCGv t2 = tcg_temp_local_new(TCG_TYPE_I64);                               \
6105
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
6106
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
6107
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
6115 6108
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
6116 6109
    tcg_op(t0, t0);                                                           \
6117 6110
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                       \
6118 6111
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
6119
    tcg_temp_free(t2);                                                        \
6112
    tcg_temp_free_i64(t2);                                                    \
6120 6113
    tcg_op(t1, t1);                                                           \
6121 6114
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
6122
    tcg_temp_free(t0);                                                        \
6123
    tcg_temp_free(t1);                                                        \
6115
    tcg_temp_free_i32(t0);                                                    \
6116
    tcg_temp_free_i32(t1);                                                    \
6124 6117
}
6125 6118
#else
6126
#define GEN_SPEOP_ARITH1(name, tcg_op)                                    \
6119
#define GEN_SPEOP_ARITH1(name, tcg_op)                                        \
6127 6120
static always_inline void gen_##name (DisasContext *ctx)                      \
6128 6121
{                                                                             \
6129 6122
    if (unlikely(!ctx->spe_enabled)) {                                        \
......
6135 6128
}
6136 6129
#endif
6137 6130

  
6138
static always_inline void gen_op_evabs (TCGv ret, TCGv arg1)
6131
static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
6139 6132
{
6140 6133
    int l1 = gen_new_label();
6141 6134
    int l2 = gen_new_label();
......
6144 6137
    tcg_gen_neg_i32(ret, arg1);
6145 6138
    tcg_gen_br(l2);
6146 6139
    gen_set_label(l1);
6147
    tcg_gen_mov_tl(ret, arg1);
6140
    tcg_gen_mov_i32(ret, arg1);
6148 6141
    gen_set_label(l2);
6149 6142
}
6150 6143
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6151 6144
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6152 6145
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6153 6146
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6154
static always_inline void gen_op_evrndw (TCGv ret, TCGv arg1)
6147
static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
6155 6148
{
6156 6149
    tcg_gen_addi_i32(ret, arg1, 0x8000);
6157 6150
    tcg_gen_ext16u_i32(ret, ret);
6158 6151
}
6159 6152
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6160
static always_inline void gen_op_cntlsw (TCGv ret, TCGv arg1)
6161
{
6162
    tcg_gen_helper_1_1(helper_cntlsw32, ret, arg1);
6163
}
6164
GEN_SPEOP_ARITH1(evcntlsw, gen_op_cntlsw);
6165
static always_inline void gen_op_cntlzw (TCGv ret, TCGv arg1)
6166
{
6167
    tcg_gen_helper_1_1(helper_cntlzw32, ret, arg1);
6168
}
6169
GEN_SPEOP_ARITH1(evcntlzw, gen_op_cntlzw);
6153
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6154
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6170 6155

  
6171 6156
#if defined(TARGET_PPC64)
6172 6157
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
......
6176 6161
        GEN_EXCP_NO_AP(ctx);                                                  \
6177 6162
        return;                                                               \
6178 6163
    }                                                                         \
6179
    TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6180
    TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6181
    TCGv t2 = tcg_temp_local_new(TCG_TYPE_I32);                               \
6182
    TCGv t3 = tcg_temp_local_new(TCG_TYPE_I64);                               \
6164
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
6165
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
6166
    TCGv_i32 t2 = tcg_temp_local_new_i32();                                   \
6167
    TCGv_i64 t3 = tcg_temp_local_new(TCG_TYPE_I64);                           \
6183 6168
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
6184 6169
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                      \
6185 6170
    tcg_op(t0, t0, t2);                                                       \
......
6187 6172
    tcg_gen_trunc_i64_i32(t1, t3);                                            \
6188 6173
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                       \
6189 6174
    tcg_gen_trunc_i64_i32(t2, t3);                                            \
6190
    tcg_temp_free(t3);                                                        \
6175
    tcg_temp_free_i64(t3);                                                    \
6191 6176
    tcg_op(t1, t1, t2);                                                       \
6192
    tcg_temp_free(t2);                                                        \
6177
    tcg_temp_free_i32(t2);                                                    \
6193 6178
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
6194
    tcg_temp_free(t0);                                                        \
6195
    tcg_temp_free(t1);                                                        \
6179
    tcg_temp_free_i32(t0);                                                    \
6180
    tcg_temp_free_i32(t1);                                                    \
6196 6181
}
6197 6182
#else
6198 6183
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
......
6209 6194
}
6210 6195
#endif
6211 6196

  
6212
static always_inline void gen_op_evsrwu (TCGv ret, TCGv arg1, TCGv arg2)
6197
static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6213 6198
{
6214
    TCGv t0;
6199
    TCGv_i32 t0;
6215 6200
    int l1, l2;
6216 6201

  
6217 6202
    l1 = gen_new_label();
6218 6203
    l2 = gen_new_label();
6219
    t0 = tcg_temp_local_new(TCG_TYPE_I32);
6204
    t0 = tcg_temp_local_new_i32();
6220 6205
    /* No error here: 6 bits are used */
6221 6206
    tcg_gen_andi_i32(t0, arg2, 0x3F);
6222 6207
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
......
6225 6210
    gen_set_label(l1);
6226 6211
    tcg_gen_movi_i32(ret, 0);
6227 6212
    tcg_gen_br(l2);
6228
    tcg_temp_free(t0);
6213
    tcg_temp_free_i32(t0);
6229 6214
}
6230 6215
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6231
static always_inline void gen_op_evsrws (TCGv ret, TCGv arg1, TCGv arg2)
6216
static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6232 6217
{
6233
    TCGv t0;
6218
    TCGv_i32 t0;
... This diff was truncated because it exceeds the maximum size that can be displayed.

Also available in: Unified diff