Revision 396e467c

--- a/target-arm/translate.c
+++ b/target-arm/translate.c
@@ -190,19 +190,11 @@
 #define gen_op_subl_T0_T1() tcg_gen_sub_i32(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_rsbl_T0_T1() tcg_gen_sub_i32(cpu_T[0], cpu_T[1], cpu_T[0])
 
-#define gen_op_addl_T0_T1_cc() gen_helper_add_cc(cpu_T[0], cpu_T[0], cpu_T[1])
-#define gen_op_adcl_T0_T1_cc() gen_helper_adc_cc(cpu_T[0], cpu_T[0], cpu_T[1])
-#define gen_op_subl_T0_T1_cc() gen_helper_sub_cc(cpu_T[0], cpu_T[0], cpu_T[1])
-#define gen_op_sbcl_T0_T1_cc() gen_helper_sbc_cc(cpu_T[0], cpu_T[0], cpu_T[1])
-#define gen_op_rsbl_T0_T1_cc() gen_helper_sub_cc(cpu_T[0], cpu_T[1], cpu_T[0])
-
 #define gen_op_andl_T0_T1() tcg_gen_and_i32(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_xorl_T0_T1() tcg_gen_xor_i32(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_orl_T0_T1() tcg_gen_or_i32(cpu_T[0], cpu_T[0], cpu_T[1])
 #define gen_op_notl_T0() tcg_gen_not_i32(cpu_T[0], cpu_T[0])
 #define gen_op_notl_T1() tcg_gen_not_i32(cpu_T[1], cpu_T[1])
-#define gen_op_logic_T0_cc() gen_logic_CC(cpu_T[0]);
-#define gen_op_logic_T1_cc() gen_logic_CC(cpu_T[1]);
 
 #define gen_op_shll_T1_im(im) tcg_gen_shli_i32(cpu_T[1], cpu_T[1], im)
 #define gen_op_shrl_T1_im(im) tcg_gen_shri_i32(cpu_T[1], cpu_T[1], im)
@@ -338,17 +330,17 @@
 }
 
 /* Unsigned 32x32->64 multiply.  */
-static void gen_op_mull_T0_T1(void)
+static void gen_mull(TCGv a, TCGv b)
 {
     TCGv_i64 tmp1 = tcg_temp_new_i64();
     TCGv_i64 tmp2 = tcg_temp_new_i64();
 
-    tcg_gen_extu_i32_i64(tmp1, cpu_T[0]);
-    tcg_gen_extu_i32_i64(tmp2, cpu_T[1]);
+    tcg_gen_extu_i32_i64(tmp1, a);
+    tcg_gen_extu_i32_i64(tmp2, b);
     tcg_gen_mul_i64(tmp1, tmp1, tmp2);
-    tcg_gen_trunc_i64_i32(cpu_T[0], tmp1);
+    tcg_gen_trunc_i64_i32(a, tmp1);
     tcg_gen_shri_i64(tmp1, tmp1, 32);
-    tcg_gen_trunc_i64_i32(cpu_T[1], tmp1);
+    tcg_gen_trunc_i64_i32(b, tmp1);
 }
 
 /* Signed 32x32->64 multiply.  */
@@ -414,12 +406,12 @@
 }
 
 /* T0 += T1 + CF.  */
-static void gen_adc_T0_T1(void)
+static void gen_adc(TCGv t0, TCGv t1)
 {
     TCGv tmp;
-    gen_op_addl_T0_T1();
+    tcg_gen_add_i32(t0, t0, t1);
     tmp = load_cpu_field(CF);
-    tcg_gen_add_i32(cpu_T[0], cpu_T[0], tmp);
+    tcg_gen_add_i32(t0, t0, tmp);
     dead_tmp(tmp);
 }
 
@@ -444,9 +436,6 @@
     dead_tmp(tmp);
 }
 
-#define gen_sbc_T0_T1() gen_sub_carry(cpu_T[0], cpu_T[0], cpu_T[1])
-#define gen_rsc_T0_T1() gen_sub_carry(cpu_T[0], cpu_T[1], cpu_T[0])
-
 /* T0 &= ~T1.  Clobbers T1.  */
 /* FIXME: Implement bic natively.  */
 static inline void tcg_gen_bic_i32(TCGv dest, TCGv t0, TCGv t1)
@@ -7064,70 +7053,70 @@
    Returns zero if the opcode is valid.  */
 
 static int
-gen_thumb2_data_op(DisasContext *s, int op, int conds, uint32_t shifter_out)
+gen_thumb2_data_op(DisasContext *s, int op, int conds, uint32_t shifter_out, TCGv t0, TCGv t1)
 {
     int logic_cc;
 
     logic_cc = 0;
     switch (op) {
     case 0: /* and */
-        gen_op_andl_T0_T1();
+        tcg_gen_and_i32(t0, t0, t1);
         logic_cc = conds;
         break;
     case 1: /* bic */
-        gen_op_bicl_T0_T1();
+        tcg_gen_bic_i32(t0, t0, t1);
         logic_cc = conds;
         break;
     case 2: /* orr */
-        gen_op_orl_T0_T1();
+        tcg_gen_or_i32(t0, t0, t1);
         logic_cc = conds;
         break;
     case 3: /* orn */
-        gen_op_notl_T1();
-        gen_op_orl_T0_T1();
+        tcg_gen_not_i32(t1, t1);
+        tcg_gen_or_i32(t0, t0, t1);
         logic_cc = conds;
         break;
     case 4: /* eor */
-        gen_op_xorl_T0_T1();
+        tcg_gen_xor_i32(t0, t0, t1);
         logic_cc = conds;
         break;
     case 8: /* add */
         if (conds)
-            gen_op_addl_T0_T1_cc();
+            gen_helper_add_cc(t0, t0, t1);
         else
-            gen_op_addl_T0_T1();
+            tcg_gen_add_i32(t0, t0, t1);
         break;
     case 10: /* adc */
         if (conds)
-            gen_op_adcl_T0_T1_cc();
+            gen_helper_adc_cc(t0, t0, t1);
        else
-            gen_adc_T0_T1();
+            gen_adc(t0, t1);
         break;
     case 11: /* sbc */
         if (conds)
-            gen_op_sbcl_T0_T1_cc();
+            gen_helper_sbc_cc(t0, t0, t1);
         else
-            gen_sbc_T0_T1();
+            gen_sub_carry(t0, t0, t1);
         break;
     case 13: /* sub */
         if (conds)
-            gen_op_subl_T0_T1_cc();
+            gen_helper_sub_cc(t0, t0, t1);
         else
-            gen_op_subl_T0_T1();
+            tcg_gen_sub_i32(t0, t0, t1);
         break;
     case 14: /* rsb */
         if (conds)
-            gen_op_rsbl_T0_T1_cc();
+            gen_helper_sub_cc(t0, t1, t0);
         else
-            gen_op_rsbl_T0_T1();
+            tcg_gen_sub_i32(t0, t1, t0);
         break;
     default: /* 5, 6, 7, 9, 12, 15. */
         return 1;
     }
     if (logic_cc) {
-        gen_op_logic_T0_cc();
+        gen_logic_CC(t0);
         if (shifter_out)
-            gen_set_CF_bit31(cpu_T[1]);
+            gen_set_CF_bit31(t1);
     }
     return 0;
 }
@@ -7183,8 +7172,7 @@
                16-bit instructions in case the second half causes an
                prefetch abort.  */
             offset = ((int32_t)insn << 21) >> 9;
-            gen_op_movl_T0_im(s->pc + 2 + offset);
-            gen_movl_reg_T0(s, 14);
+            tcg_gen_movi_i32(cpu_R[14], s->pc + 2 + offset);
             return 0;
         }
         /* Fall through to 32-bit decode.  */
@@ -7463,7 +7451,7 @@
         conds = (insn & (1 << 20)) != 0;
         logic_cc = (conds && thumb2_logic_op(op));
         gen_arm_shift_im(cpu_T[1], shiftop, shift, logic_cc);
-        if (gen_thumb2_data_op(s, op, conds, 0))
+        if (gen_thumb2_data_op(s, op, conds, 0, cpu_T[0], cpu_T[1]))
             goto illegal_op;
         if (rd != 15)
             gen_movl_reg_T0(s, rd);
@@ -8025,7 +8013,7 @@
                     gen_movl_T0_reg(s, rn);
                 op = (insn >> 21) & 0xf;
                 if (gen_thumb2_data_op(s, op, (insn & (1 << 20)) != 0,
-                                       shifter_out))
+                                       shifter_out, cpu_T[0], cpu_T[1]))
                     goto illegal_op;
                 rd = (insn >> 8) & 0xf;
                 if (rd != 15) {
@@ -8173,32 +8161,35 @@
 
     switch (insn >> 12) {
     case 0: case 1:
+
         rd = insn & 7;
         op = (insn >> 11) & 3;
         if (op == 3) {
             /* add/subtract */
             rn = (insn >> 3) & 7;
-            gen_movl_T0_reg(s, rn);
+            tmp = load_reg(s, rn);
             if (insn & (1 << 10)) {
                 /* immediate */
-                gen_op_movl_T1_im((insn >> 6) & 7);
+                tmp2 = new_tmp();
+                tcg_gen_movi_i32(tmp2, (insn >> 6) & 7);
             } else {
                 /* reg */
                 rm = (insn >> 6) & 7;
-                gen_movl_T1_reg(s, rm);
+                tmp2 = load_reg(s, rm);
             }
             if (insn & (1 << 9)) {
                 if (s->condexec_mask)
-                    gen_op_subl_T0_T1();
+                    tcg_gen_sub_i32(tmp, tmp, tmp2);
                 else
-                    gen_op_subl_T0_T1_cc();
+                    gen_helper_sub_cc(tmp, tmp, tmp2);
             } else {
                 if (s->condexec_mask)
-                    gen_op_addl_T0_T1();
+                    tcg_gen_add_i32(tmp, tmp, tmp2);
                 else
-                    gen_op_addl_T0_T1_cc();
+                    gen_helper_add_cc(tmp, tmp, tmp2);
             }
-            gen_movl_reg_T0(s, rd);
+            dead_tmp(tmp2);
+            store_reg(s, rd, tmp);
         } else {
             /* shift immediate */
             rm = (insn >> 3) & 7;
@@ -8214,35 +8205,40 @@
         /* arithmetic large immediate */
         op = (insn >> 11) & 3;
         rd = (insn >> 8) & 0x7;
-        if (op == 0) {
-            gen_op_movl_T0_im(insn & 0xff);
-        } else {
-            gen_movl_T0_reg(s, rd);
-            gen_op_movl_T1_im(insn & 0xff);
-        }
-        switch (op) {
-        case 0: /* mov */
+        if (op == 0) { /* mov */
+            tmp = new_tmp();
+            tcg_gen_movi_i32(tmp, insn & 0xff);
             if (!s->condexec_mask)
-                gen_op_logic_T0_cc();
-            break;
-        case 1: /* cmp */
-            gen_op_subl_T0_T1_cc();
-            break;
-        case 2: /* add */
-            if (s->condexec_mask)
-                gen_op_addl_T0_T1();
-            else
-                gen_op_addl_T0_T1_cc();
-            break;
-        case 3: /* sub */
-            if (s->condexec_mask)
-                gen_op_subl_T0_T1();
-            else
-                gen_op_subl_T0_T1_cc();
-            break;
+                gen_logic_CC(tmp);
+            store_reg(s, rd, tmp);
+        } else {
+            tmp = load_reg(s, rd);
+            tmp2 = new_tmp();
+            tcg_gen_movi_i32(tmp2, insn & 0xff);
+            switch (op) {
+            case 1: /* cmp */
+                gen_helper_sub_cc(tmp, tmp, tmp2);
+                dead_tmp(tmp);
+                dead_tmp(tmp2);
+                break;
+            case 2: /* add */
+                if (s->condexec_mask)
+                    tcg_gen_add_i32(tmp, tmp, tmp2);
+                else
+                    gen_helper_add_cc(tmp, tmp, tmp2);
+                dead_tmp(tmp2);
+                store_reg(s, rd, tmp);
+                break;
+            case 3: /* sub */
+                if (s->condexec_mask)
+                    tcg_gen_sub_i32(tmp, tmp, tmp2);
+                else
+                    gen_helper_sub_cc(tmp, tmp, tmp2);
+                dead_tmp(tmp2);
+                store_reg(s, rd, tmp);
+                break;
+            }
         }
-        if (op != 1)
-            gen_movl_reg_T0(s, rd);
         break;
     case 4:
         if (insn & (1 << 11)) {
@@ -8264,19 +8260,22 @@
             op = (insn >> 8) & 3;
             switch (op) {
             case 0: /* add */
-                gen_movl_T0_reg(s, rd);
-                gen_movl_T1_reg(s, rm);
-                gen_op_addl_T0_T1();
-                gen_movl_reg_T0(s, rd);
+                tmp = load_reg(s, rd);
+                tmp2 = load_reg(s, rm);
+                tcg_gen_add_i32(tmp, tmp, tmp2);
+                dead_tmp(tmp2);
+                store_reg(s, rd, tmp);
                 break;
             case 1: /* cmp */
-                gen_movl_T0_reg(s, rd);
-                gen_movl_T1_reg(s, rm);
-                gen_op_subl_T0_T1_cc();
+                tmp = load_reg(s, rd);
+                tmp2 = load_reg(s, rm);
+                gen_helper_sub_cc(tmp, tmp, tmp2);
+                dead_tmp(tmp2);
+                dead_tmp(tmp);
                 break;
             case 2: /* mov/cpy */
-                gen_movl_T0_reg(s, rm);
-                gen_movl_reg_T0(s, rd);
+                tmp = load_reg(s, rm);
+                store_reg(s, rd, tmp);
                 break;
             case 3:/* branch [and link] exchange thumb register */
                 tmp = load_reg(s, rm);
@@ -8306,114 +8305,125 @@
             val = 0;
         }
 
-        if (op == 9) /* neg */
-            gen_op_movl_T0_im(0);
-        else if (op != 0xf) /* mvn doesn't read its first operand */
-            gen_movl_T0_reg(s, rd);
+        if (op == 9) { /* neg */
+            tmp = new_tmp();
+            tcg_gen_movi_i32(tmp, 0);
+        } else if (op != 0xf) { /* mvn doesn't read its first operand */
+            tmp = load_reg(s, rd);
+        } else {
+            TCGV_UNUSED(tmp);
+        }
 
-        gen_movl_T1_reg(s, rm);
+        tmp2 = load_reg(s, rm);
         switch (op) {
         case 0x0: /* and */
-            gen_op_andl_T0_T1();
+            tcg_gen_and_i32(tmp, tmp, tmp2);
             if (!s->condexec_mask)
-                gen_op_logic_T0_cc();
+                gen_logic_CC(tmp);
             break;
         case 0x1: /* eor */
-            gen_op_xorl_T0_T1();
+            tcg_gen_xor_i32(tmp, tmp, tmp2);
             if (!s->condexec_mask)
-                gen_op_logic_T0_cc();
+                gen_logic_CC(tmp);
             break;
         case 0x2: /* lsl */
             if (s->condexec_mask) {
-                gen_helper_shl(cpu_T[1], cpu_T[1], cpu_T[0]);
+                gen_helper_shl(tmp2, tmp2, tmp);
             } else {
-                gen_helper_shl_cc(cpu_T[1], cpu_T[1], cpu_T[0]);
-                gen_op_logic_T1_cc();
+                gen_helper_shl_cc(tmp2, tmp2, tmp);
+                gen_logic_CC(tmp2);
             }
             break;
         case 0x3: /* lsr */
             if (s->condexec_mask) {
-                gen_helper_shr(cpu_T[1], cpu_T[1], cpu_T[0]);
+                gen_helper_shr(tmp2, tmp2, tmp);
             } else {
-                gen_helper_shr_cc(cpu_T[1], cpu_T[1], cpu_T[0]);
-                gen_op_logic_T1_cc();
+                gen_helper_shr_cc(tmp2, tmp2, tmp);
+                gen_logic_CC(tmp2);
             }
             break;
         case 0x4: /* asr */
             if (s->condexec_mask) {
-                gen_helper_sar(cpu_T[1], cpu_T[1], cpu_T[0]);
+                gen_helper_sar(tmp2, tmp2, tmp);
             } else {
-                gen_helper_sar_cc(cpu_T[1], cpu_T[1], cpu_T[0]);
-                gen_op_logic_T1_cc();
+                gen_helper_sar_cc(tmp2, tmp2, tmp);
+                gen_logic_CC(tmp2);
             }
             break;
         case 0x5: /* adc */
             if (s->condexec_mask)
-                gen_adc_T0_T1();
+                gen_adc(tmp, tmp2);
             else
-                gen_op_adcl_T0_T1_cc();
+                gen_helper_adc_cc(tmp, tmp, tmp2);
             break;
         case 0x6: /* sbc */
             if (s->condexec_mask)
-                gen_sbc_T0_T1();
+                gen_sub_carry(tmp, tmp, tmp2);
            else
-                gen_op_sbcl_T0_T1_cc();
+                gen_helper_sbc_cc(tmp, tmp, tmp2);
             break;
         case 0x7: /* ror */
             if (s->condexec_mask) {
-                gen_helper_ror(cpu_T[1], cpu_T[1], cpu_T[0]);
+                gen_helper_ror(tmp2, tmp2, tmp);
             } else {
-                gen_helper_ror_cc(cpu_T[1], cpu_T[1], cpu_T[0]);
-                gen_op_logic_T1_cc();
+                gen_helper_ror_cc(tmp2, tmp2, tmp);
+                gen_logic_CC(tmp2);
             }
             break;
         case 0x8: /* tst */
-            gen_op_andl_T0_T1();
-            gen_op_logic_T0_cc();
+            tcg_gen_and_i32(tmp, tmp, tmp2);
+            gen_logic_CC(tmp);
             rd = 16;
             break;
         case 0x9: /* neg */
             if (s->condexec_mask)
-                tcg_gen_neg_i32(cpu_T[0], cpu_T[1]);
+                tcg_gen_neg_i32(tmp, tmp2);
             else
-                gen_op_subl_T0_T1_cc();
+                gen_helper_sub_cc(tmp, tmp, tmp2);
             break;
         case 0xa: /* cmp */
-            gen_op_subl_T0_T1_cc();
+            gen_helper_sub_cc(tmp, tmp, tmp2);
             rd = 16;
             break;
         case 0xb: /* cmn */
-            gen_op_addl_T0_T1_cc();
+            gen_helper_add_cc(tmp, tmp, tmp2);
             rd = 16;
             break;
        case 0xc: /* orr */
-            gen_op_orl_T0_T1();
+            tcg_gen_or_i32(tmp, tmp, tmp2);
             if (!s->condexec_mask)
-                gen_op_logic_T0_cc();
+                gen_logic_CC(tmp);
             break;
         case 0xd: /* mul */
-            gen_op_mull_T0_T1();
+            gen_mull(tmp, tmp2);
             if (!s->condexec_mask)
-                gen_op_logic_T0_cc();
+                gen_logic_CC(tmp);
             break;
         case 0xe: /* bic */
-            gen_op_bicl_T0_T1();
+            tcg_gen_bic_i32(tmp, tmp, tmp2);
             if (!s->condexec_mask)
-                gen_op_logic_T0_cc();
+                gen_logic_CC(tmp);
             break;
         case 0xf: /* mvn */
-            gen_op_notl_T1();
+            tcg_gen_not_i32(tmp2, tmp2);
             if (!s->condexec_mask)
-                gen_op_logic_T1_cc();
+                gen_logic_CC(tmp2);
             val = 1;
             rm = rd;
             break;
         }
         if (rd != 16) {
-            if (val)
-                gen_movl_reg_T1(s, rm);
-            else
-                gen_movl_reg_T0(s, rd);
+            if (val) {
+                store_reg(s, rm, tmp2);
+                if (op != 0xf)
+                    dead_tmp(tmp);
+            } else {
+                store_reg(s, rd, tmp);
+                dead_tmp(tmp2);
+            }
+        } else {
+            dead_tmp(tmp);
+            dead_tmp(tmp2);
         }
         break;
 

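The change running through every hunk above is the same: code that used to pass operands through the fixed globals cpu_T[0]/cpu_T[1] now works on explicit TCGv temporaries obtained from load_reg()/new_tmp() and released with dead_tmp()/store_reg(). A minimal before/after sketch of that pattern, modelled on the Thumb "add" hunk above (s, rd, rm and the TCGv declarations for tmp/tmp2 are assumed to come from the surrounding decoder; this is an illustration, not part of the revision):

    /* Old style: operands travel through the implicit cpu_T[] globals. */
    gen_movl_T0_reg(s, rd);             /* cpu_T[0] = reg[rd] */
    gen_movl_T1_reg(s, rm);             /* cpu_T[1] = reg[rm] */
    gen_op_addl_T0_T1();                /* cpu_T[0] += cpu_T[1] */
    gen_movl_reg_T0(s, rd);             /* reg[rd] = cpu_T[0] */

    /* New style: explicit, per-instruction TCG temporaries. */
    tmp = load_reg(s, rd);              /* fresh temp holding reg[rd] */
    tmp2 = load_reg(s, rm);             /* fresh temp holding reg[rm] */
    tcg_gen_add_i32(tmp, tmp, tmp2);    /* tmp += tmp2 */
    dead_tmp(tmp2);                     /* second temp is no longer needed */
    store_reg(s, rd, tmp);              /* write back; note there is no separate
                                           dead_tmp(tmp) here, matching the hunks above */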