Revision cad3a37d target-i386/translate.c

--- a/target-i386/translate.c
+++ b/target-i386/translate.c
@@ -498,65 +498,6 @@
 #endif
 };
 
-#define DEF_ARITHC(SUFFIX)\
-    {\
-        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
-        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
-    },\
-    {\
-        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
-        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
-    },\
-    {\
-        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
-        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
-    },\
-    {\
-        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
-        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
-    },
-
-static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
-    DEF_ARITHC( )
-};
-
-static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
-    DEF_ARITHC(_raw)
-#ifndef CONFIG_USER_ONLY
-    DEF_ARITHC(_kernel)
-    DEF_ARITHC(_user)
-#endif
-};
-
-static const int cc_op_arithb[8] = {
-    CC_OP_ADDB,
-    CC_OP_LOGICB,
-    CC_OP_ADDB,
-    CC_OP_SUBB,
-    CC_OP_LOGICB,
-    CC_OP_SUBB,
-    CC_OP_LOGICB,
-    CC_OP_SUBB,
-};
-
-#define DEF_CMPXCHG(SUFFIX)\
-    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
-    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
-    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
-    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
-
-static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
-    DEF_CMPXCHG( )
-};
-
-static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
-    DEF_CMPXCHG(_raw)
-#ifndef CONFIG_USER_ONLY
-    DEF_CMPXCHG(_kernel)
-    DEF_CMPXCHG(_user)
-#endif
-};
-
 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
     [0] = {
         gen_op_btw_T0_T1_cc,
@@ -1257,11 +1198,53 @@
     helper_fdiv_STN_ST0,
 };
 
+/* compute eflags.C to reg */
+static void gen_compute_eflags_c(TCGv reg)
+{
+#if TCG_TARGET_REG_BITS == 32
+    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
+    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
+                     (long)cc_table + offsetof(CCTable, compute_c));
+    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
+    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
+                 1, &cpu_tmp2_i32, 0, NULL);
+#else
+    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
+    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
+    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
+                     (long)cc_table + offsetof(CCTable, compute_c));
+    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
+    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
+                 1, &cpu_tmp2_i32, 0, NULL);
+#endif
+    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
+}
+
+/* compute all eflags to cc_src */
+static void gen_compute_eflags(TCGv reg)
+{
+#if TCG_TARGET_REG_BITS == 32
+    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
+    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
+                     (long)cc_table + offsetof(CCTable, compute_all));
+    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
+    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
+                 1, &cpu_tmp2_i32, 0, NULL);
+#else
+    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
+    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
+    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
+                     (long)cc_table + offsetof(CCTable, compute_all));
+    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
+    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
+                 1, &cpu_tmp2_i32, 0, NULL);
+#endif
+    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
+}
+
 /* if d == OR_TMP0, it means memory operand (address in A0) */
 static void gen_op(DisasContext *s1, int op, int ot, int d)
 {
-    GenOpFunc *gen_update_cc;
-
     if (d != OR_TMP0) {
         gen_op_mov_TN_reg(ot, 0, d);
     } else {
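
Note on the two helpers added in the hunk above: they emit TCG ops that index the global cc_table by the current cc_op and then call the selected helper through a function pointer. The index is shifted left by 3 on 32-bit hosts and by 4 on 64-bit hosts, which is consistent with a table entry holding two function pointers (the compute_all and compute_c fields named in the offsetof expressions). A minimal standalone C sketch of that lookup; the entry contents below are dummies, not QEMU's real flag helpers:

    #include <stdio.h>

    /* Illustrative stand-in for a cc_table entry: one "compute all flags"
       helper and one "compute carry only" helper per CC_OP value. */
    typedef struct CCTable {
        int (*compute_all)(void);
        int (*compute_c)(void);
    } CCTable;

    static int all_add(void) { return 0x11; }   /* dummy flag values */
    static int c_add(void)   { return 0x01; }

    static const CCTable cc_table_model[] = {
        [1] = { all_add, c_add },               /* pretend index 1 is "ADD" */
    };

    int main(void)
    {
        int cc_op = 1;
        /* What the generated ops do at run time: scale cc_op by the entry
           size, load the function pointer, and make an indirect call. */
        const CCTable *ent = &cc_table_model[cc_op];
        printf("C=%d, all=0x%x\n", ent->compute_c(), ent->compute_all());
        return 0;
    }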
@@ -1269,104 +1252,90 @@
     }
     switch(op) {
     case OP_ADCL:
+        if (s1->cc_op != CC_OP_DYNAMIC)
+            gen_op_set_cc_op(s1->cc_op);
+        gen_compute_eflags_c(cpu_tmp4);
+        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
+        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
+        if (d != OR_TMP0)
+            gen_op_mov_reg_T0(ot, d);
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
+        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
+        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
+        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
+        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
+        s1->cc_op = CC_OP_DYNAMIC;
+        break;
     case OP_SBBL:
         if (s1->cc_op != CC_OP_DYNAMIC)
             gen_op_set_cc_op(s1->cc_op);
-        if (d != OR_TMP0) {
-            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
+        gen_compute_eflags_c(cpu_tmp4);
+        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
+        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
+        if (d != OR_TMP0)
             gen_op_mov_reg_T0(ot, d);
-        } else {
-            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
-        }
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
+        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
+        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
+        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
+        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
         s1->cc_op = CC_OP_DYNAMIC;
-        goto the_end;
+        break;
     case OP_ADDL:
         gen_op_addl_T0_T1();
+        if (d != OR_TMP0)
+            gen_op_mov_reg_T0(ot, d);
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        gen_op_update2_cc();
         s1->cc_op = CC_OP_ADDB + ot;
-        gen_update_cc = gen_op_update2_cc;
         break;
     case OP_SUBL:
         tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
+        if (d != OR_TMP0)
+            gen_op_mov_reg_T0(ot, d);
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        gen_op_update2_cc();
         s1->cc_op = CC_OP_SUBB + ot;
-        gen_update_cc = gen_op_update2_cc;
         break;
     default:
     case OP_ANDL:
         tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
+        if (d != OR_TMP0)
+            gen_op_mov_reg_T0(ot, d);
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        gen_op_update1_cc();
         s1->cc_op = CC_OP_LOGICB + ot;
-        gen_update_cc = gen_op_update1_cc;
         break;
     case OP_ORL:
         tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
+        if (d != OR_TMP0)
+            gen_op_mov_reg_T0(ot, d);
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        gen_op_update1_cc();
         s1->cc_op = CC_OP_LOGICB + ot;
-        gen_update_cc = gen_op_update1_cc;
         break;
     case OP_XORL:
         tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
+        if (d != OR_TMP0)
+            gen_op_mov_reg_T0(ot, d);
+        else
+            gen_op_st_T0_A0(ot + s1->mem_index);
+        gen_op_update1_cc();
         s1->cc_op = CC_OP_LOGICB + ot;
-        gen_update_cc = gen_op_update1_cc;
         break;
     case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
         s1->cc_op = CC_OP_SUBB + ot;
-        gen_update_cc = NULL;
         break;
     }
-    if (op != OP_CMPL) {
-        if (d != OR_TMP0)
-            gen_op_mov_reg_T0(ot, d);
-        else
-            gen_op_st_T0_A0(ot + s1->mem_index);
-    }
-    /* the flags update must happen after the memory write (precise
-       exception support) */
-    if (gen_update_cc)
-        gen_update_cc();
- the_end: ;
-}
-
-/* compute eflags.C to reg */
-static void gen_compute_eflags_c(TCGv reg)
-{
-#if TCG_TARGET_REG_BITS == 32
-    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
-    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
-                     (long)cc_table + offsetof(CCTable, compute_c));
-    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
-    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
-                 1, &reg, 0, NULL);
-#else
-    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
-    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
-    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
-                     (long)cc_table + offsetof(CCTable, compute_c));
-    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
-    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
-                 1, &cpu_tmp2_i32, 0, NULL);
-    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
-#endif
-}
-
-/* compute all eflags to cc_src */
-static void gen_compute_eflags(TCGv reg)
-{
-#if TCG_TARGET_REG_BITS == 32
-    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
-    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
-                     (long)cc_table + offsetof(CCTable, compute_all));
-    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
-    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
-                 1, &reg, 0, NULL);
-#else
-    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
-    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
-    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
-                     (long)cc_table + offsetof(CCTable, compute_all));
-    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
-    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
-                 1, &cpu_tmp2_i32, 0, NULL);
-    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
-#endif
 }
 
 /* if d == OR_TMP0, it means memory operand (address in A0) */
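
Note on the new OP_ADCL/OP_SBBL code above: the incoming carry is first materialized into cpu_tmp4 as 0 or 1, the result is computed with ordinary add (or sub) operations, and cc_op is then set at run time to CC_OP_ADDB + ot (resp. CC_OP_SUBB + ot) plus the carry shifted left by 2. That selects the ADC (resp. SBB) flavour of the flag computation when the carry was set, which relies on the CC_OP enumeration laying out the four operand sizes of ADD immediately followed by the four sizes of ADC, and SUB followed by SBB. A small sketch of the index arithmetic; the mimic enum below is an assumption about that layout, not copied from cpu.h:

    #include <stdio.h>

    /* Assumed layout: four sizes (B/W/L/Q) per group, ADC right after ADD,
       SBB right after SUB.  The real enum lives in target-i386/cpu.h. */
    enum {
        CC_OP_ADDB, CC_OP_ADDW, CC_OP_ADDL, CC_OP_ADDQ,
        CC_OP_ADCB, CC_OP_ADCW, CC_OP_ADCL, CC_OP_ADCQ,
        CC_OP_SUBB, CC_OP_SUBW, CC_OP_SUBL, CC_OP_SUBQ,
        CC_OP_SBBB, CC_OP_SBBW, CC_OP_SBBL, CC_OP_SBBQ,
    };

    int main(void)
    {
        int ot = 2;                                   /* OT_LONG */
        for (int carry = 0; carry <= 1; carry++) {
            /* Same arithmetic as the generated shli/addi sequence. */
            int cc_op = CC_OP_ADDB + ot + (carry << 2);
            printf("carry=%d -> cc_op=%d (%s)\n", carry, cc_op,
                   carry ? "CC_OP_ADCL" : "CC_OP_ADDL");
        }
        return 0;
    }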
@@ -1393,6 +1362,23 @@
     gen_compute_eflags_c(cpu_cc_src);
 }
 
+static void gen_extu(int ot, TCGv reg)
+{
+    switch(ot) {
+    case OT_BYTE:
+        tcg_gen_ext8u_tl(reg, reg);
+        break;
+    case OT_WORD:
+        tcg_gen_ext16u_tl(reg, reg);
+        break;
+    case OT_LONG:
+        tcg_gen_ext32u_tl(reg, reg);
+        break;
+    default:
+        break;
+    }
+}
+
 /* XXX: add faster immediate case */
 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                             int is_right, int is_arith)
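
Note on gen_extu, added above: it factors out the zero-extension switch that was previously duplicated in the two shift helpers further down, and it is reused by the cmpxchg rewrite at the end of the patch. It zero-extends the value to 8, 16 or 32 bits according to the operand size and leaves other sizes untouched. A plain-C behavioural model of the same operation; the helper name is hypothetical:

    #include <stdint.h>
    #include <stdio.h>

    /* Operand-size constants as used in translate.c. */
    enum { OT_BYTE, OT_WORD, OT_LONG, OT_QUAD };

    /* Behavioural model of gen_extu: zero-extend val from the given
       operand size; OT_QUAD (and anything else) passes through. */
    static uint64_t extu_model(int ot, uint64_t val)
    {
        switch (ot) {
        case OT_BYTE: return (uint8_t)val;
        case OT_WORD: return (uint16_t)val;
        case OT_LONG: return (uint32_t)val;
        default:      return val;
        }
    }

    int main(void)
    {
        /* Only the low 16 bits of 0xdeadbeef survive an OT_WORD extension. */
        printf("%llx\n", (unsigned long long)extu_model(OT_WORD, 0xdeadbeefULL));
        return 0;
    }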
@@ -1433,19 +1419,7 @@
             tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
             tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
         } else {
-            switch(ot) {
-            case OT_BYTE:
-                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
-                break;
-            case OT_WORD:
-                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
-                break;
-            case OT_LONG:
-                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
-                break;
-            default:
-                break;
-            }
+            gen_extu(ot, cpu_T[0]);
             tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
             tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
         }
@@ -1516,19 +1490,7 @@
     else
         tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
 
-    switch(ot) {
-    case OT_BYTE:
-        tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
-        break;
-    case OT_WORD:
-        tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
-        break;
-    case OT_LONG:
-        tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
-        break;
-    default:
-        break;
-    }
+    gen_extu(ot, cpu_T[0]);
     tcg_gen_mov_tl(cpu_T3, cpu_T[0]);
 
     data_bits = 8 << ot;
@@ -4270,25 +4232,42 @@
         break;
     case 0x1b0:
     case 0x1b1: /* cmpxchg Ev, Gv */
-        if ((b & 1) == 0)
-            ot = OT_BYTE;
-        else
-            ot = dflag + OT_WORD;
-        modrm = ldub_code(s->pc++);
-        reg = ((modrm >> 3) & 7) | rex_r;
-        mod = (modrm >> 6) & 3;
-        gen_op_mov_TN_reg(ot, 1, reg);
-        if (mod == 3) {
-            rm = (modrm & 7) | REX_B(s);
-            gen_op_mov_TN_reg(ot, 0, rm);
-            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
-            gen_op_mov_reg_T0(ot, rm);
-        } else {
-            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
-            gen_op_ld_T0_A0(ot + s->mem_index);
-            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
+        {
+            int label1;
+
+            if ((b & 1) == 0)
+                ot = OT_BYTE;
+            else
+                ot = dflag + OT_WORD;
+            modrm = ldub_code(s->pc++);
+            reg = ((modrm >> 3) & 7) | rex_r;
+            mod = (modrm >> 6) & 3;
+            gen_op_mov_TN_reg(ot, 1, reg);
+            if (mod == 3) {
+                rm = (modrm & 7) | REX_B(s);
+                gen_op_mov_TN_reg(ot, 0, rm);
+            } else {
+                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
+                gen_op_ld_T0_A0(ot + s->mem_index);
+                rm = 0; /* avoid warning */
+            }
+            label1 = gen_new_label();
+            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
+            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
+            gen_extu(ot, cpu_T3);
+            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
+            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
+            gen_op_mov_reg_T0(ot, R_EAX);
+            gen_set_label(label1);
+            if (mod == 3) {
+                gen_op_mov_reg_T1(ot, rm);
+            } else {
+                gen_op_st_T1_A0(ot + s->mem_index);
+            }
+            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
+            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
+            s->cc_op = CC_OP_SUBB + ot;
         }
-        s->cc_op = CC_OP_SUBB + ot;
         break;
     case 0x1c7: /* cmpxchg8b */
         modrm = ldub_code(s->pc++);
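
Note on the rewritten cmpxchg case above: instead of dispatching through the gen_op_cmpxchg tables removed at the top of the patch, the comparison is now emitted inline. EAX is loaded from env, EAX - dest is computed into cpu_T3 and zero-extended to the operand size with gen_extu, and a conditional branch skips the "not equal" actions (copying the old destination into cpu_T[1] and loading it into EAX) when the difference is zero; cpu_T[1] is then written back to the register or to memory in both cases, and cc_src/cc_dst are set up for CC_OP_SUBB + ot. A behavioural C sketch of the sequence for a 32-bit register destination; the function name and zf_out parameter are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    /* Behavioural model of the generated CMPXCHG Ev, Gv sequence for a
       32-bit destination.  zf_out stands in for the ZF that the stored
       CC_OP_SUBL state (cc_dst = EAX - dest) later produces. */
    static void cmpxchg32_model(uint32_t *eax, uint32_t *dest, uint32_t src,
                                int *zf_out)
    {
        uint32_t old = *dest;
        int equal = (*eax == old);

        if (equal) {
            *dest = src;   /* branch taken: the source operand is stored */
        } else {
            *eax  = old;   /* fall-through: load old dest into EAX ... */
            *dest = old;   /* ... and the write-back still happens */
        }
        *zf_out = equal;
    }

    int main(void)
    {
        uint32_t eax = 5, mem = 5;
        int zf;

        cmpxchg32_model(&eax, &mem, 9, &zf);
        printf("equal:   eax=%u mem=%u zf=%d\n", eax, mem, zf);

        eax = 1; mem = 5;
        cmpxchg32_model(&eax, &mem, 9, &zf);
        printf("unequal: eax=%u mem=%u zf=%d\n", eax, mem, zf);
        return 0;
    }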
