Revision 6a18ae2d

--- a/osdep.h
+++ b/osdep.h
@@ -70,12 +70,6 @@
 #define inline always_inline
 #endif
 
-#ifdef __i386__
-#define REGPARM __attribute((regparm(3)))
-#else
-#define REGPARM
-#endif
-
 #define qemu_printf printf
 
 int qemu_daemon(int nochdir, int noclose);
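For context, the REGPARM macro removed above expanded on i386 hosts to GCC's regparm(3) attribute, which passes the first three integer arguments in EAX, EDX and ECX instead of on the stack, and expanded to nothing elsewhere. A minimal sketch of the old expansion; example_helper is hypothetical and only shows where the macro used to be applied:

    /* Sketch of what the removed macro amounted to; with it gone,
     * helpers fall back to the default C calling convention. */
    #ifdef __i386__
    #define REGPARM __attribute((regparm(3)))  /* first 3 int args in EAX, EDX, ECX */
    #else
    #define REGPARM                            /* no effect on other hosts */
    #endif

    int REGPARM example_helper(int a, int b, int c);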
--- a/softmmu_defs.h
+++ b/softmmu_defs.h
@@ -9,22 +9,22 @@
 #ifndef SOFTMMU_DEFS_H
 #define SOFTMMU_DEFS_H
 
-uint8_t REGPARM __ldb_mmu(target_ulong addr, int mmu_idx);
-void REGPARM __stb_mmu(target_ulong addr, uint8_t val, int mmu_idx);
-uint16_t REGPARM __ldw_mmu(target_ulong addr, int mmu_idx);
-void REGPARM __stw_mmu(target_ulong addr, uint16_t val, int mmu_idx);
-uint32_t REGPARM __ldl_mmu(target_ulong addr, int mmu_idx);
-void REGPARM __stl_mmu(target_ulong addr, uint32_t val, int mmu_idx);
-uint64_t REGPARM __ldq_mmu(target_ulong addr, int mmu_idx);
-void REGPARM __stq_mmu(target_ulong addr, uint64_t val, int mmu_idx);
+uint8_t __ldb_mmu(target_ulong addr, int mmu_idx);
+void __stb_mmu(target_ulong addr, uint8_t val, int mmu_idx);
+uint16_t __ldw_mmu(target_ulong addr, int mmu_idx);
+void __stw_mmu(target_ulong addr, uint16_t val, int mmu_idx);
+uint32_t __ldl_mmu(target_ulong addr, int mmu_idx);
+void __stl_mmu(target_ulong addr, uint32_t val, int mmu_idx);
+uint64_t __ldq_mmu(target_ulong addr, int mmu_idx);
+void __stq_mmu(target_ulong addr, uint64_t val, int mmu_idx);
 
-uint8_t REGPARM __ldb_cmmu(target_ulong addr, int mmu_idx);
-void REGPARM __stb_cmmu(target_ulong addr, uint8_t val, int mmu_idx);
-uint16_t REGPARM __ldw_cmmu(target_ulong addr, int mmu_idx);
-void REGPARM __stw_cmmu(target_ulong addr, uint16_t val, int mmu_idx);
-uint32_t REGPARM __ldl_cmmu(target_ulong addr, int mmu_idx);
-void REGPARM __stl_cmmu(target_ulong addr, uint32_t val, int mmu_idx);
-uint64_t REGPARM __ldq_cmmu(target_ulong addr, int mmu_idx);
-void REGPARM __stq_cmmu(target_ulong addr, uint64_t val, int mmu_idx);
+uint8_t __ldb_cmmu(target_ulong addr, int mmu_idx);
+void __stb_cmmu(target_ulong addr, uint8_t val, int mmu_idx);
+uint16_t __ldw_cmmu(target_ulong addr, int mmu_idx);
+void __stw_cmmu(target_ulong addr, uint16_t val, int mmu_idx);
+uint32_t __ldl_cmmu(target_ulong addr, int mmu_idx);
+void __stl_cmmu(target_ulong addr, uint32_t val, int mmu_idx);
+uint64_t __ldq_cmmu(target_ulong addr, int mmu_idx);
+void __stq_cmmu(target_ulong addr, uint64_t val, int mmu_idx);
 
 #endif
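With REGPARM dropped, these softmmu load/store helpers are called with the host's default C calling convention; the b/w/l/q suffixes correspond to the 8/16/32/64-bit types visible in the prototypes. A standalone sketch, not QEMU code, using stub definitions with the same signatures (target_ulong is assumed to be 32 bits here and the backing store is a toy array):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t target_ulong;          /* assumption for the sketch */

    static uint32_t guest_ram[1024];        /* toy backing store */

    uint32_t __ldl_mmu(target_ulong addr, int mmu_idx)
    {
        (void)mmu_idx;                      /* the real helper selects a TLB by index */
        return guest_ram[(addr / 4) % 1024];
    }

    void __stl_mmu(target_ulong addr, uint32_t val, int mmu_idx)
    {
        (void)mmu_idx;
        guest_ram[(addr / 4) % 1024] = val;
    }

    int main(void)
    {
        __stl_mmu(0x1000, 0xdeadbeef, 0);   /* ordinary C calls, args on the stack on i386 */
        printf("0x%08x\n", __ldl_mmu(0x1000, 0));
        return 0;
    }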
--- a/softmmu_template.h
+++ b/softmmu_template.h
@@ -89,8 +89,7 @@
 }
 
 /* handle all cases except unaligned access which span two pages */
-DATA_TYPE REGPARM glue(glue(__ld, SUFFIX), MMUSUFFIX)(target_ulong addr,
-                                                      int mmu_idx)
+DATA_TYPE glue(glue(__ld, SUFFIX), MMUSUFFIX)(target_ulong addr, int mmu_idx)
 {
     DATA_TYPE res;
     int index;
@@ -232,9 +231,8 @@
 #endif /* SHIFT > 2 */
 }
 
-void REGPARM glue(glue(__st, SUFFIX), MMUSUFFIX)(target_ulong addr,
-                                                 DATA_TYPE val,
-                                                 int mmu_idx)
+void glue(glue(__st, SUFFIX), MMUSUFFIX)(target_ulong addr, DATA_TYPE val,
+                                         int mmu_idx)
 {
     target_phys_addr_t ioaddr;
     unsigned long addend;
--- a/tcg/i386/tcg-target.c
+++ b/tcg/i386/tcg-target.c
@@ -116,17 +116,7 @@
         return 6;
     }
 
-    flags &= TCG_CALL_TYPE_MASK;
-    switch(flags) {
-    case TCG_CALL_TYPE_STD:
-        return 0;
-    case TCG_CALL_TYPE_REGPARM_1:
-    case TCG_CALL_TYPE_REGPARM_2:
-    case TCG_CALL_TYPE_REGPARM:
-        return flags - TCG_CALL_TYPE_REGPARM_1 + 1;
-    default:
-        tcg_abort();
-    }
+    return 0;
 }
 
 /* parse target specific constraints */
@@ -1148,7 +1138,12 @@
     int data_reg, data_reg2 = 0;
     int addrlo_idx;
 #if defined(CONFIG_SOFTMMU)
-    int mem_index, s_bits, arg_idx;
+    int mem_index, s_bits;
+#if TCG_TARGET_REG_BITS == 64
+    int arg_idx;
+#else
+    int stack_adjust;
+#endif
     uint8_t *label_ptr[3];
 #endif
 
@@ -1184,16 +1179,33 @@
     }
 
     /* XXX: move that code at the end of the TB */
+#if TCG_TARGET_REG_BITS == 32
+    tcg_out_pushi(s, mem_index);
+    stack_adjust = 4;
+    if (TARGET_LONG_BITS == 64) {
+        tcg_out_push(s, args[addrlo_idx + 1]);
+        stack_adjust += 4;
+    }
+    tcg_out_push(s, args[addrlo_idx]);
+    stack_adjust += 4;
+#else
     /* The first argument is already loaded with addrlo.  */
     arg_idx = 1;
-    if (TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 64) {
-        tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[arg_idx++],
-                    args[addrlo_idx + 1]);
-    }
     tcg_out_movi(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[arg_idx],
                  mem_index);
+#endif
+
     tcg_out_calli(s, (tcg_target_long)qemu_ld_helpers[s_bits]);
 
+#if TCG_TARGET_REG_BITS == 32
+    if (stack_adjust == (TCG_TARGET_REG_BITS / 8)) {
+        /* Pop and discard.  This is 2 bytes smaller than the add.  */
+        tcg_out_pop(s, TCG_REG_ECX);
+    } else if (stack_adjust != 0) {
+        tcg_out_addi(s, TCG_REG_CALL_STACK, stack_adjust);
+    }
+#endif
+
     switch(opc) {
     case 0 | 4:
         tcg_out_ext8s(s, data_reg, TCG_REG_EAX, P_REXW);
@@ -1359,45 +1371,27 @@
     }
 
     /* XXX: move that code at the end of the TB */
-    if (TCG_TARGET_REG_BITS == 64) {
-        tcg_out_mov(s, (opc == 3 ? TCG_TYPE_I64 : TCG_TYPE_I32),
-                    TCG_REG_RSI, data_reg);
-        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
-        stack_adjust = 0;
-    } else if (TARGET_LONG_BITS == 32) {
-        tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_EDX, data_reg);
-        if (opc == 3) {
-            tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_ECX, data_reg2);
-            tcg_out_pushi(s, mem_index);
-            stack_adjust = 4;
-        } else {
-            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
-            stack_adjust = 0;
-        }
-    } else {
-        if (opc == 3) {
-            tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_EDX, args[addrlo_idx + 1]);
-            tcg_out_pushi(s, mem_index);
-            tcg_out_push(s, data_reg2);
-            tcg_out_push(s, data_reg);
-            stack_adjust = 12;
-        } else {
-            tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_EDX, args[addrlo_idx + 1]);
-            switch(opc) {
-            case 0:
-                tcg_out_ext8u(s, TCG_REG_ECX, data_reg);
-                break;
-            case 1:
-                tcg_out_ext16u(s, TCG_REG_ECX, data_reg);
-                break;
-            case 2:
-                tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_ECX, data_reg);
-                break;
-            }
-            tcg_out_pushi(s, mem_index);
-            stack_adjust = 4;
-        }
+#if TCG_TARGET_REG_BITS == 32
+    tcg_out_pushi(s, mem_index);
+    stack_adjust = 4;
+    if (opc == 3) {
+        tcg_out_push(s, data_reg2);
+        stack_adjust += 4;
+    }
+    tcg_out_push(s, data_reg);
+    stack_adjust += 4;
+    if (TARGET_LONG_BITS == 64) {
+        tcg_out_push(s, args[addrlo_idx + 1]);
+        stack_adjust += 4;
     }
+    tcg_out_push(s, args[addrlo_idx]);
+    stack_adjust += 4;
+#else
+    tcg_out_mov(s, (opc == 3 ? TCG_TYPE_I64 : TCG_TYPE_I32),
+                TCG_REG_RSI, data_reg);
+    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
+    stack_adjust = 0;
+#endif
 
     tcg_out_calli(s, (tcg_target_long)qemu_st_helpers[s_bits]);
 
@@ -1962,9 +1956,15 @@
         tcg_out_push(s, tcg_target_callee_save_regs[i]);
     }
 
-    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);
-
+#if TCG_TARGET_REG_BITS == 32
+    tcg_out_ld(s, TCG_TYPE_PTR, TCG_AREG0, TCG_REG_ESP,
+               (ARRAY_SIZE(tcg_target_callee_save_regs) + 1) * 4);
+    tcg_out_ld(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[1], TCG_REG_ESP,
+               (ARRAY_SIZE(tcg_target_callee_save_regs) + 2) * 4);
+#else
     tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
+#endif
+    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);
 
     /* jmp *tb.  */
     tcg_out_modrm(s, OPC_GRP5, EXT5_JMPN_Ev, tcg_target_call_iarg_regs[1]);
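A note on the new 32-bit paths above: helper arguments are now pushed on the stack, stack_adjust accumulates the pushed bytes, and after the call the caller cleans up either with a single pop (when exactly one 4-byte word was pushed) or with an add to the stack pointer. A small standalone sketch of that bookkeeping, not QEMU code, with TARGET_LONG_BITS and opc treated as plain parameters:

    #include <stdio.h>

    /* Mirrors how the patch accumulates stack_adjust for the 32-bit
     * qemu_ld path: push mem_index, optionally addrhi, then addrlo. */
    static int ld_stack_bytes(int target_long_bits)
    {
        int stack_adjust = 4;                 /* push mem_index */
        if (target_long_bits == 64) {
            stack_adjust += 4;                /* push addrhi */
        }
        stack_adjust += 4;                    /* push addrlo */
        return stack_adjust;
    }

    /* Same idea for the 32-bit qemu_st path: mem_index, optional high
     * data half, data, optional addrhi, addrlo. */
    static int st_stack_bytes(int target_long_bits, int opc)
    {
        int stack_adjust = 4;                 /* push mem_index */
        if (opc == 3) {
            stack_adjust += 4;                /* push data_reg2 (high half) */
        }
        stack_adjust += 4;                    /* push data_reg */
        if (target_long_bits == 64) {
            stack_adjust += 4;                /* push addrhi */
        }
        stack_adjust += 4;                    /* push addrlo */
        return stack_adjust;
    }

    int main(void)
    {
        int bytes = st_stack_bytes(32, 0);    /* e.g. 8-bit store, 32-bit guest */
        /* The patch pops when exactly one word was pushed, else adds to ESP. */
        printf("store pushed %d bytes, cleanup: %s\n", bytes,
               bytes == 4 ? "pop and discard" : "add to ESP");
        printf("load, 64-bit guest address: %d bytes\n", ld_stack_bytes(64));
        return 0;
    }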
--- a/tcg/ppc/tcg-target.h
+++ b/tcg/ppc/tcg-target.h
@@ -98,5 +98,5 @@
 #define TCG_TARGET_HAS_GUEST_BASE
 
 #define tcg_qemu_tb_exec(env, tb_ptr) \
-    ((long REGPARM __attribute__ ((longcall)) \
+    ((long __attribute__ ((longcall)) \
       (*)(void *, void *))code_gen_prologue)(env, tb_ptr)
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -594,9 +594,6 @@
 void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
                    int sizemask, TCGArg ret, int nargs, TCGArg *args)
 {
-#if defined(TCG_TARGET_I386) && TCG_TARGET_REG_BITS < 64
-    int call_type;
-#endif
     int i;
     int real_args;
     int nb_rets;
@@ -621,9 +618,6 @@
 
     *gen_opc_ptr++ = INDEX_op_call;
     nparam = gen_opparam_ptr++;
-#if defined(TCG_TARGET_I386) && TCG_TARGET_REG_BITS < 64
-    call_type = (flags & TCG_CALL_TYPE_MASK);
-#endif
     if (ret != TCG_CALL_DUMMY_ARG) {
 #if TCG_TARGET_REG_BITS < 64
         if (sizemask & 1) {
@@ -649,14 +643,6 @@
 #if TCG_TARGET_REG_BITS < 64
         int is_64bit = sizemask & (1 << (i+1)*2);
         if (is_64bit) {
-#ifdef TCG_TARGET_I386
-            /* REGPARM case: if the third parameter is 64 bit, it is
-               allocated on the stack */
-            if (i == 2 && call_type == TCG_CALL_TYPE_REGPARM) {
-                call_type = TCG_CALL_TYPE_REGPARM_2;
-                flags = (flags & ~TCG_CALL_TYPE_MASK) | call_type;
-            }
-#endif
 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
             /* some targets want aligned 64 bit args */
             if (real_args & 1) {
--- a/tcg/tcg.h
+++ b/tcg/tcg.h
@@ -252,11 +252,6 @@
 #define TCGV_UNUSED_I64(x) x = MAKE_TCGV_I64(-1)
 
 /* call flags */
-#define TCG_CALL_TYPE_MASK      0x000f
-#define TCG_CALL_TYPE_STD       0x0000 /* standard C call */
-#define TCG_CALL_TYPE_REGPARM_1 0x0001 /* i386 style regparm call (1 reg) */
-#define TCG_CALL_TYPE_REGPARM_2 0x0002 /* i386 style regparm call (2 regs) */
-#define TCG_CALL_TYPE_REGPARM   0x0003 /* i386 style regparm call (3 regs) */
 /* A pure function only reads its arguments and TCG global variables
    and cannot raise exceptions. Hence a call to a pure function can be
    safely suppressed if the return value is not used. */
@@ -589,5 +584,5 @@
 /* TCG targets may use a different definition of tcg_qemu_tb_exec. */
 #if !defined(tcg_qemu_tb_exec)
 # define tcg_qemu_tb_exec(env, tb_ptr) \
-    ((tcg_target_ulong REGPARM (*)(void *, void *))code_gen_prologue)(env, tb_ptr)
+    ((tcg_target_ulong (*)(void *, void *))code_gen_prologue)(env, tb_ptr)
 #endif
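The tcg_qemu_tb_exec macro above now casts code_gen_prologue to an ordinary function pointer, so translated code is entered with the default C calling convention. A standalone sketch of that cast-and-call pattern; fake_prologue, the stand-in arguments and the tcg_target_ulong typedef are assumptions for the sketch, not QEMU code:

    #include <stdio.h>

    typedef unsigned long tcg_target_ulong;       /* assumption for the sketch */

    static tcg_target_ulong fake_prologue(void *env, void *tb_ptr)
    {
        printf("entered with env=%p tb_ptr=%p\n", env, tb_ptr);
        return 0;                                 /* "next TB" value */
    }

    int main(void)
    {
        /* In QEMU this would point at generated host code. */
        void *code_gen_prologue = (void *)fake_prologue;
        int env_stub, tb_stub;

        /* Same shape as the new tcg.h macro: plain function-pointer cast,
           no REGPARM attribute, default C calling convention. */
        tcg_target_ulong next_tb =
            ((tcg_target_ulong (*)(void *, void *))code_gen_prologue)
                (&env_stub, &tb_stub);
        return (int)next_tb;
    }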
