Revision b03cce8e tcg/x86_64/tcg-target.c

--- a/tcg/x86_64/tcg-target.c
+++ b/tcg/x86_64/tcg-target.c
@@ -73,6 +73,8 @@
     TCG_REG_RDX
 };
 
+static uint8_t *tb_ret_addr;
+
 static void patch_reloc(uint8_t *code_ptr, int type,
                         tcg_target_long value, tcg_target_long addend)
 {
@@ -841,7 +843,8 @@
     switch(opc) {
     case INDEX_op_exit_tb:
         tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
-        tcg_out8(s, 0xc3); /* ret */
+        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
+        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
         break;
     case INDEX_op_goto_tb:
         if (s->tb_jmp_offset) {
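
Note (editor): before this change, exit_tb simply emitted a bare ret, which was enough because no per-translation stack frame existed. With the prologue/epilogue introduced further down, every TB exit has to unwind that frame, so exit_tb now jumps to the shared epilogue recorded in tb_ret_addr. The rel32 operand of the 0xe9 jump is relative to the end of the 5-byte instruction; since s->code_ptr already points past the opcode byte when tcg_out32 runs, the displacement comes out as tb_ret_addr - s->code_ptr - 4. A minimal standalone sketch of that encoding (illustrative names, not QEMU code):

    /* emit_jmp_rel32: write a 5-byte "jmp rel32" at code_ptr, targeting target.
       The displacement is relative to the address of the next instruction. */
    #include <stdint.h>
    #include <string.h>

    static uint8_t *emit_jmp_rel32(uint8_t *code_ptr, const uint8_t *target)
    {
        *code_ptr++ = 0xe9;                              /* jmp rel32 opcode */
        int32_t disp = (int32_t)(target - code_ptr - 4); /* same formula as tb_ret_addr - s->code_ptr - 4 */
        memcpy(code_ptr, &disp, 4);                      /* x86-64 is little-endian */
        return code_ptr + 4;
    }

    int main(void)
    {
        static uint8_t buf[64];
        uint8_t *epilogue = buf;                          /* pretend the shared epilogue starts here */
        uint8_t *p = emit_jmp_rel32(buf + 16, epilogue);  /* encodes e9 eb ff ff ff (disp = -21) */
        return p == buf + 21 ? 0 : 1;
    }
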
@@ -1129,6 +1132,58 @@
     }
 }
 
+static int tcg_target_callee_save_regs[] = {
+    TCG_REG_R10,
+    TCG_REG_R11,
+    TCG_REG_RBP,
+    TCG_REG_RBX,
+    TCG_REG_R12,
+    TCG_REG_R13,
+    /*    TCG_REG_R14, */ /* currently used for the global env, so no
+                             need to save */
+    TCG_REG_R15,
+};
+
+static inline void tcg_out_push(TCGContext *s, int reg)
+{
+    tcg_out_opc(s, (0x50 + (reg & 7)), 0, reg, 0);
+}
+
+static inline void tcg_out_pop(TCGContext *s, int reg)
+{
+    tcg_out_opc(s, (0x58 + (reg & 7)), 0, reg, 0);
+}
+
+/* Generate global QEMU prologue and epilogue code */
+void tcg_target_qemu_prologue(TCGContext *s)
+{
+    int i, frame_size, push_size, stack_addend;
+
+    /* TB prologue */
+    /* save all callee saved registers */
+    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
+        tcg_out_push(s, tcg_target_callee_save_regs[i]);
+
+    }
+    /* reserve some stack space */
+    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
+    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
+    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
+        ~(TCG_TARGET_STACK_ALIGN - 1);
+    stack_addend = frame_size - push_size;
+    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);
+
+    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */
+
+    /* TB epilogue */
+    tb_ret_addr = s->code_ptr;
+    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
+    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
+        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
+    }
+    tcg_out8(s, 0xc3); /* ret */
+}
+
 static const TCGTargetOpDef x86_64_op_defs[] = {
     { INDEX_op_exit_tb, { } },
     { INDEX_op_goto_tb, { } },
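
Note (editor): the new prologue pushes the callee-saved registers, reserves an aligned stack area for TCG helper-call arguments, and then jumps to the translated block whose address arrives in %rdi (the first SysV argument register). tb_ret_addr marks the epilogue, which releases the same amount of stack, pops the registers in reverse order and returns. The frame arithmetic is the usual round-up-to-alignment pattern; a standalone sketch with placeholder constants (the real TCG_STATIC_CALL_ARGS_SIZE and TCG_TARGET_STACK_ALIGN values are defined elsewhere in TCG and are not shown in this diff):

    /* Frame-size computation from tcg_target_qemu_prologue(), with the
       TCG constants replaced by placeholder values for illustration. */
    #include <stdio.h>

    #define NB_SAVED_REGS          7    /* entries in tcg_target_callee_save_regs[] above */
    #define STATIC_CALL_ARGS_SIZE  128  /* placeholder for TCG_STATIC_CALL_ARGS_SIZE */
    #define STACK_ALIGN            16   /* placeholder for TCG_TARGET_STACK_ALIGN */

    int main(void)
    {
        int push_size = 8 + NB_SAVED_REGS * 8;     /* return address plus pushed registers */
        int frame_size = push_size + STATIC_CALL_ARGS_SIZE;
        /* round frame_size up to the next multiple of STACK_ALIGN */
        frame_size = (frame_size + STACK_ALIGN - 1) & ~(STACK_ALIGN - 1);
        int stack_addend = frame_size - push_size; /* amount subtracted from %rsp */
        printf("push_size=%d frame_size=%d stack_addend=%d\n",
               push_size, frame_size, stack_addend);
        return 0;
    }
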
@@ -1212,6 +1267,10 @@
 
 void tcg_target_init(TCGContext *s)
 {
+    /* fail safe */
+    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
+        tcg_abort();
+
     tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
     tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
     tcg_regset_set32(tcg_target_call_clobber_regs, 0,
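
Note (editor): the added "fail safe" aborts at init time if CPUTLBEntry is not exactly 1 << CPU_TLB_ENTRY_BITS bytes. The softmmu fast path that TCG emits typically computes the TLB entry's byte offset with a shift and a mask rather than a multiply, which is only correct for a power-of-two entry size. A standalone illustration with a hypothetical entry layout and placeholder constants (none of these values are taken from this diff):

    /* Hypothetical TLB entry: the point is only that indexing with
       shift+mask requires sizeof(entry) == 1 << ENTRY_BITS. */
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ENTRY_BITS 5     /* placeholder for CPU_TLB_ENTRY_BITS */
    #define PAGE_BITS  12    /* placeholder for TARGET_PAGE_BITS */
    #define TLB_SIZE   256   /* placeholder for CPU_TLB_SIZE */

    typedef struct {
        uint64_t addr_read;
        uint64_t addr_write;
        uint64_t addr_code;
        uint64_t addend;
    } tlb_entry;             /* 32 bytes == 1 << ENTRY_BITS */

    /* byte offset of the TLB entry covering vaddr, using only shift and mask */
    static uint64_t tlb_entry_offset(uint64_t vaddr)
    {
        return ((vaddr >> PAGE_BITS) & (TLB_SIZE - 1)) << ENTRY_BITS;
    }

    int main(void)
    {
        /* mirrors the check added in tcg_target_init() */
        assert((1 << ENTRY_BITS) == sizeof(tlb_entry));
        printf("offset for 0x12345678: %llu\n",
               (unsigned long long)tlb_entry_offset(0x12345678));
        return 0;
    }
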
@@ -1227,10 +1286,6 @@
 
     tcg_regset_clear(s->reserved_regs);
     tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);
-    /* XXX: will be suppresed when proper global TB entry code will be
-       generated */
-    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RBX);
-    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RBP);
 
     tcg_add_target_add_op_defs(x86_64_op_defs);
 }
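
Note (editor): with the prologue/epilogue now saving and restoring RBX and RBP like any other callee-saved register, they no longer need to be kept out of the register allocator, which is why the reservation (and its XXX comment) is dropped here. Taken together, the generated buffer behaves like an ordinary SysV function: the caller passes the TB entry point as the first argument (%rdi), the prologue jumps to it, exit_tb loads its argument into %rax and jumps to tb_ret_addr, and the epilogue returns that value. A sketch of how such a buffer could be invoked from C, under those assumptions, using a stand-in stub instead of real generated code (Linux/x86-64 only; names are illustrative, not QEMU's):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>

    /* Matches the ABI implied by the prologue above: TB entry point in %rdi,
       exit_tb value back in %rax. */
    typedef uintptr_t (*tb_exec_fn)(void *tb_entry);

    int main(void)
    {
        /* mov eax, 0x2a ; ret  --  stands in for "exit_tb 42" plus the epilogue */
        static const uint8_t stub[] = { 0xb8, 0x2a, 0x00, 0x00, 0x00, 0xc3 };

        void *buf = mmap(NULL, 4096, PROT_READ | PROT_WRITE | PROT_EXEC,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (buf == MAP_FAILED)
            return 1;
        memcpy(buf, stub, sizeof(stub));

        tb_exec_fn exec = (tb_exec_fn)buf;          /* in QEMU this would be the prologue buffer */
        printf("returned %lu\n", (unsigned long)exec(NULL)); /* prints 42 */
        return 0;
    }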
