Statistics
| Branch: | Revision:

root / tcg / x86_64 / tcg-target.c @ d2604285

History | View | Annotate | Download (37.9 kB)

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25
#ifndef NDEBUG
/* Printable names for the 16 general-purpose registers, indexed by
   TCG register number; used only for debug dumps. */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
#endif
45

    
46
/* Register allocation preference order: the call-clobbered registers
   (argument registers first, then RAX/R10/R11) come before the
   callee-saved ones (RBP, RBX, R12-R15). */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_RAX,
    TCG_REG_R10,
    TCG_REG_R11,

    /* callee-saved registers */
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};
64

    
65
/* Integer argument registers, in ABI order (System V AMD64). */
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
73

    
74
/* Integer return-value registers, in ABI order. */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX, 
    TCG_REG_RDX 
};
78

    
79
/* Address that INDEX_op_exit_tb jumps back to (see tcg_out_op).
   NOTE(review): initialized outside this chunk, presumably when the
   prologue/epilogue is generated -- confirm against the rest of the file. */
static uint8_t *tb_ret_addr;
80

    
81
/* Resolve one relocation recorded against generated code.  Every
   supported relocation type patches a 32-bit field at code_ptr;
   a value that does not fit the field is a fatal error. */
static void patch_reloc(uint8_t *code_ptr, int type, 
                        tcg_target_long value, tcg_target_long addend)
{
    tcg_target_long v = value + addend;

    switch (type) {
    case R_X86_64_32:
        /* absolute, zero-extended 32-bit value */
        if (v != (uint32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    case R_X86_64_32S:
        /* absolute, sign-extended 32-bit value */
        if (v != (int32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    case R_386_PC32:
        /* 32-bit PC-relative displacement */
        v -= (long)code_ptr;
        if (v != (int32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    default:
        tcg_abort();
    }
}
106

    
107
/* Maximum number of registers used for input function arguments.
   The System V AMD64 calling convention passes up to six integer
   arguments in registers, regardless of the call flags. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    (void)flags; /* unused: the count is fixed by the ABI */
    return 6;
}
112

    
113
/* parse target specific constraints */
/* Consumes one constraint letter from *pct_str, fills in ct, and
   advances the string.  Returns 0 on success, -1 on an unknown letter.
   Letters 'a'..'D' pin a single fixed register; 'q' allows the four
   low byte-addressable regs, 'r' any GP reg; 'L' is 'r' minus
   RSI/RDI (reserved by the qemu_ld/st fast path below); 'e'/'Z'
   accept sign-/zero-extendable 32-bit constants. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'q':
        /* RAX, RCX, RDX, RBX only */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        /* any of the 16 general-purpose registers */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        /* any GP reg except RSI/RDI, which the ld/st code clobbers */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'e':
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
171

    
172
/* test if a constant matches the constraint */
173
static inline int tcg_target_const_match(tcg_target_long val,
174
                                         const TCGArgConstraint *arg_ct)
175
{
176
    int ct;
177
    ct = arg_ct->ct;
178
    if (ct & TCG_CT_CONST)
179
        return 1;
180
    else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
181
        return 1;
182
    else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
183
        return 1;
184
    else
185
        return 0;
186
}
187

    
188
/* x86 group-1 ALU operations; the value goes in the /r (reg) field of
   the ModRM byte for the 0x81/0x83 immediate forms, or selects the
   opcode (op << 3) for the register forms. */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* x86 shift/rotate operations, used as the /r field with opcode 0xc1. */
#define SHIFT_ROL 0
#define SHIFT_ROR 1
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7
202

    
203
/* x86 condition codes for Jcc (0x70+cc short, 0x0f 0x80+cc near).
   JCC_JMP is a sentinel meaning "unconditional jump". */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

/* Flags OR'ed into the opcode argument of tcg_out_opc() and friends;
   they occupy bits above the low opcode byte. */
#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REXB  0x400 /* force rex use for byte registers */
224
                                  
225
/* Map TCG comparison conditions to x86 condition codes:
   signed compares use JL/JGE/JLE/JG, unsigned use JB/JAE/JBE/JA. */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
237

    
238
/* Emit an opcode byte, preceded by a REX prefix and/or the 0x0f escape
   when needed.  r/rm/x are the (extended) register numbers destined for
   the ModRM reg/rm and SIB index fields; bit 3 of each becomes REX.R,
   REX.B and REX.X respectively, and P_REXW in opc becomes REX.W. */
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    /* REX.W from opc bit 9 (P_REXW); REX.R/X/B from bit 3 of r/x/rm */
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) | 
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    /* P_REXB forces an (empty) REX so SPL/BPL/SIL/DIL are addressable */
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc & 0xff);
}
250

    
251
/* Emit opcode + ModRM byte in register-direct form (mod = 11b). */
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
256

    
257
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
/* Emit opcode + ModRM (+ SIB, + displacement) for a memory operand
   [rm + offset].  With rm < 0 the operand is an absolute address:
   RIP-relative when the displacement fits in 32 bits (accounting for
   the -rm - 1 immediate bytes that follow this instruction),
   otherwise an absolute disp32 via a SIB byte. */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm, 
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        /* displacement is relative to the end of the instruction:
           opcode+modrm+disp32 = 5 bytes, plus trailing immediates */
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            /* absolute: mod=00, rm=100 (SIB), SIB=0x25 => disp32 only */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        /* mod=00, no displacement; RBP is excluded because rm=101 with
           mod=00 means RIP-relative, so RBP always needs a disp byte */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* rm=100 selects a SIB byte; 0x24 = base RSP, no index */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        /* mod=01: 8-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        /* mod=10: 32-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
304

    
305
#if defined(CONFIG_SOFTMMU)
306
/* XXX: incomplete. index must be different from ESP */
307
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm, 
308
                                  int index, int shift,
309
                                  tcg_target_long offset)
310
{
311
    int mod;
312
    if (rm == -1)
313
        tcg_abort();
314
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
315
        mod = 0;
316
    } else if (offset == (int8_t)offset) {
317
        mod = 0x40;
318
    } else if (offset == (int32_t)offset) {
319
        mod = 0x80;
320
    } else {
321
        tcg_abort();
322
    }
323
    if (index == -1) {
324
        tcg_out_opc(s, opc, r, rm, 0);
325
        if ((rm & 7) == TCG_REG_RSP) {
326
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
327
            tcg_out8(s, 0x04 | (rm & 7));
328
        } else {
329
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
330
        }
331
    } else {
332
        tcg_out_opc(s, opc, r, rm, index);
333
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
334
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
335
    }
336
    if (mod == 0x40) {
337
        tcg_out8(s, offset);
338
    } else if (mod == 0x80) {
339
        tcg_out32(s, offset);
340
    }
341
}
342
#endif
343

    
344
/* 64-bit register-to-register move: movq arg, ret. */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
348

    
349
/* Load constant arg into register ret, using the shortest encoding:
   xor for zero, 32-bit mov (which zero-extends) when possible, a
   sign-extended imm32, or a full 10-byte movabs. */
static inline void tcg_out_movi(TCGContext *s, TCGType type, 
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        /* movl imm32; writing the 32-bit register clears the high half */
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        /* movq with sign-extended imm32 */
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        /* movabs: full 64-bit immediate */
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
366

    
367
/* Load from memory into register: ret = *(arg1 + arg2),
   as movl for 32-bit values or movq for 64-bit ones. */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    int opc = (type == TCG_TYPE_I32) ? 0x8b : (0x8b | P_REXW);
    tcg_out_modrm_offset(s, opc, ret, arg1, arg2);
}
375

    
376
/* Store register to memory: *(arg1 + arg2) = arg,
   as movl for 32-bit values or movq for 64-bit ones. */
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    int opc = (type == TCG_TYPE_I32) ? 0x89 : (0x89 | P_REXW);
    tcg_out_modrm_offset(s, opc, arg, arg1, arg2);
}
384

    
385
/* 32-bit ALU op with immediate: r0 = r0 <c> val.  Prefers the short
   sign-extended imm8 form (0x83); AND with 0xff/0xffff is replaced by
   the shorter movzbl/movzwl; otherwise falls back to imm32 (0x81). */
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
401

    
402
/* 64-bit ALU op with immediate: r0 = r0 <c> val.  Same shortcuts as
   the 32-bit version, plus AND with 0xffffffff via a 32-bit mov (which
   zero-extends).  Values needing a true 64-bit immediate abort: x86
   ALU ops only take a sign-extended imm32. */
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (val == (int32_t)val) {
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        /* 32-bit AND clears the high half anyway, so drop REX.W */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
426

    
427
/* Add a constant to a 64-bit register; emits nothing for zero. */
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val == 0) {
        return;
    }
    tgen_arithi64(s, ARITH_ADD, reg, val);
}
432

    
433
/* Emit a jump to a TCG label: conditional when opc is a JCC_* code,
   unconditional when opc == -1.  Backward (resolved) jumps pick the
   short rel8 form when the displacement fits; forward jumps always
   use the 32-bit form and record a R_386_PC32 relocation. */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];
    
    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        /* val1 = displacement from the end of a 2-byte short jump */
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);      /* jmp rel8 */
            else
                tcg_out8(s, 0x70 + opc); /* jcc rel8 */
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);       /* jmp rel32: 5 bytes total */
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);       /* jcc rel32: 6 bytes total */
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        /* label not yet defined: emit the 32-bit form and relocate */
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
468

    
469
/* Compare arg1 with arg2 (register or constant) and branch to
   label_index on condition cond.  rexw selects 32- vs 64-bit compare;
   comparison against constant 0 uses the shorter 'test r,r'. */
static void tcg_out_brcond(TCGContext *s, int cond, 
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
        } else {
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
        }
    } else {
        /* cmp arg2, arg1 (register form) */
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
488

    
489
#if defined(CONFIG_SOFTMMU)
490

    
491
#include "../../softmmu_defs.h"
492

    
493
static void *qemu_ld_helpers[4] = {
494
    __ldb_mmu,
495
    __ldw_mmu,
496
    __ldl_mmu,
497
    __ldq_mmu,
498
};
499

    
500
static void *qemu_st_helpers[4] = {
501
    __stb_mmu,
502
    __stw_mmu,
503
    __stl_mmu,
504
    __stq_mmu,
505
};
506
#endif
507

    
508
/* Generate a guest memory load.  args = { data_reg, addr_reg, mem_index };
   opc bits 0-1 give log2 of the access size, bit 2 requests sign
   extension.  With CONFIG_SOFTMMU this emits an inline TLB lookup
   (fast path) with a fallback call to qemu_ld_helpers[]; without it
   the guest address is used directly.  Clobbers RDI (r0) and RSI (r1). */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* TLB lookup: r1 = TLB index, r0 = page-aligned address */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS); 

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* compare the TLB tag; a match means the fast path is valid */
    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* reload the unmasked address into r0 */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: call the softmmu helper (args in RDI/RSI per ABI) */
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] - 
              (tcg_target_long)s->code_ptr - 4);

    /* move the helper result (RAX) into data_reg with the right
       extension for the access size and signedness */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
        /* movzbq */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1:
        /* movzwq */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: (TLB hit) -- patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* turn the guest address into a host address */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) - 
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif    

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* fast path: load directly from (r0), byte-swapping if the guest
       endianness differs from the host's */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, 0);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66); 
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* zero-extend, swap the two bytes, then sign-extend */
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66); 
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, 0);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, 0);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: -- patch the jmp displacement past the fast path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
690

    
691
/* Generate a guest memory store.  args = { data_reg, addr_reg, mem_index };
   opc is log2 of the access size.  Mirrors tcg_out_qemu_ld: inline
   TLB check against addr_write with a fallback call to
   qemu_st_helpers[].  Clobbers RDI (r0) and RSI (r1). */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* TLB lookup: r1 = TLB index, r0 = page-aligned address */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS); 

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* compare the TLB tag; a match means the fast path is valid */
    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* reload the unmasked address into r0 */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: marshal the value into RSI (second helper argument),
       truncating/zero-extending to the access size, then call out */
    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] - 
              (tcg_target_long)s->code_ptr - 4);

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: (TLB hit) -- patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* turn the guest address into a host address */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) - 
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* fast path: store directly to (r0); when byte-swapping, the
       swapped value is built in r1 so data_reg is preserved */
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, 0);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, 0);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: -- patch the jmp displacement past the fast path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
834

    
835
/* Emit host x86-64 machine code for one TCG opcode.
 *
 * s          - translation context; bytes are appended at s->code_ptr.
 * opc        - the INDEX_op_* opcode to translate.
 * args       - operands: host register numbers, immediates, memory
 *              offsets or label/TB indices, depending on the opcode.
 * const_args - per-operand flags; const_args[i] non-zero means args[i]
 *              is a constant value rather than a register.
 *
 * Encoding flags (defined earlier in this file): judging by their use
 * below, P_EXT selects the two-byte 0x0f opcode space, P_REXW forces a
 * REX.W (64-bit operand size) prefix, and P_REXB forces a REX prefix so
 * the low byte of %sil/%dil/%spl/%bpl is addressable.
 */
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;
    
    switch(opc) {
    case INDEX_op_exit_tb:
        /* Return value of the TB goes in %rax, then jump to the common
           epilogue at tb_ret_addr (rel32 displacement from next insn). */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method: emit a jmp rel32 whose displacement
               is patched later; record where the displacement lives. */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method: jump through the tb_next[] slot */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1, 
                                 (tcg_target_long)(s->tb_next + 
                                                   args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            /* call rel32 to a known address */
            tcg_out8(s, 0xe8);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            /* call *reg (ModRM /2) */
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            /* jmp rel32 to a known address */
            tcg_out8(s, 0xe9);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            /* jmp *reg (ModRM /4) */
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        /* unconditional branch to TCG label args[0] */
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;

    /* Loads: args[0] = dest reg, args[1] = base reg, args[2] = offset.
       8/16-bit zero-extending loads share one encoding for i32/i64
       because the 32-bit form already clears the high 32 bits. */
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i64:
        /* movsbq */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i64:
        /* movswq */
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        /* movslq */
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
        break;

    /* Stores: args[0] = source reg, args[1] = base reg, args[2] = offset. */
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        /* movw (0x66 operand-size prefix + 32-bit store opcode) */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
        break;

    /* Two-operand 32-bit ALU ops; dest (args[0]) doubles as first
       source per the "0" matching constraint in x86_64_op_defs. */
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith32;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith32;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith32;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith32:
        if (const_args[2]) {
            tgen_arithi32(s, c, args[0], args[2]);
        } else {
            /* ALU opcode group: 0x01 + (op << 3), reg,reg form */
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;

    /* Same as above with REX.W for 64-bit operand size. */
    case INDEX_op_sub_i64:
        c = ARITH_SUB;
        goto gen_arith64;
    case INDEX_op_and_i64:
        c = ARITH_AND;
        goto gen_arith64;
    case INDEX_op_or_i64:
        c = ARITH_OR;
        goto gen_arith64;
    case INDEX_op_xor_i64:
        c = ARITH_XOR;
        goto gen_arith64;
    case INDEX_op_add_i64:
        c = ARITH_ADD;
    gen_arith64:
        if (const_args[2]) {
            tgen_arithi64(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
        }
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                /* imul reg, reg, imm8 */
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                /* imul reg, reg, imm32 */
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            /* imul reg, reg (0x0f 0xaf) */
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mul_i64:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
        }
        break;
    /* Division: idiv/div (0xf7 /7 and /6) implicitly use %rdx:%rax, as
       fixed by the "a"/"d" constraints in x86_64_op_defs; args[4] is
       the divisor register. */
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);
        break;
    case INDEX_op_div2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
        break;
    case INDEX_op_divu2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
        break;

    /* Shifts/rotates: the shift count is either an immediate (special
       one-bit form 0xd1, imm8 form 0xc1) or %cl (0xd3, enforced by the
       "c" constraint). */
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;

    case INDEX_op_shl_i64:
        c = SHIFT_SHL;
    gen_shift64:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
        }
        break;
    case INDEX_op_shr_i64:
        c = SHIFT_SHR;
        goto gen_shift64;
    case INDEX_op_sar_i64:
        c = SHIFT_SAR;
        goto gen_shift64;
    case INDEX_op_rotl_i64:
        c = SHIFT_ROL;
        goto gen_shift64;
    case INDEX_op_rotr_i64:
        c = SHIFT_ROR;
        goto gen_shift64;

    /* Conditional branches: args[2] = condition, args[0]/args[1] =
       comparison operands, args[3] = label index. */
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], 
                       args[3], 0);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], 
                       args[3], P_REXW);
        break;

    /* bswap reg: 0x0f 0xc8+r; the register's low 3 bits go in the
       opcode byte, the full number is passed for the REX prefix. */
    case INDEX_op_bswap_i32:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
        break;
    case INDEX_op_bswap_i64:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
        break;

    /* neg (0xf7 /3) and not (0xf7 /2), in-place on args[0]. */
    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]);
        break;
    case INDEX_op_neg_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
        break;

    case INDEX_op_not_i32:
        tcg_out_modrm(s, 0xf7, 2, args[0]);
        break;
    case INDEX_op_not_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 2, args[0]);
        break;

    /* Sign extensions: movsbl/movswl/movslq variants. */
    case INDEX_op_ext8s_i32:
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXB, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8s_i64:
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i64:
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext32s_i64:
        tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
        break;

    /* Guest memory accesses: bit 2 of the size code means
       sign-extend, bits 0-1 encode log2 of the access size. */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
        
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        /* Every opcode advertised in x86_64_op_defs must be handled. */
        tcg_abort();
    }
}
1173

    
1174
static int tcg_target_callee_save_regs[] = {
1175
    TCG_REG_RBP,
1176
    TCG_REG_RBX,
1177
    TCG_REG_R12,
1178
    TCG_REG_R13,
1179
    /*    TCG_REG_R14, */ /* currently used for the global env, so no
1180
                             need to save */
1181
    TCG_REG_R15,
1182
};
1183

    
1184
/* Emit a one-byte "push reg".  The opcode is 0x50 plus the register's
   low three bits; the full register number is also handed to
   tcg_out_opc, which (as elsewhere in this backend) takes care of the
   REX prefix needed for %r8-%r15. */
static inline void tcg_out_push(TCGContext *s, int reg)
{
    int opcode = 0x50 + (reg & 7);

    tcg_out_opc(s, opcode, 0, reg, 0);
}
1188

    
1189
/* Emit a one-byte "pop reg" (0x58 + low three register bits), the
   mirror of tcg_out_push; tcg_out_opc handles the REX prefix for
   %r8-%r15. */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    int opcode = 0x58 + (reg & 7);

    tcg_out_opc(s, opcode, 0, reg, 0);
}
1193

    
1194
/* Generate global QEMU prologue and epilogue code */
1195
void tcg_target_qemu_prologue(TCGContext *s)
1196
{
1197
    int i, frame_size, push_size, stack_addend;
1198

    
1199
    /* TB prologue */
1200
    /* save all callee saved registers */
1201
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
1202
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
1203

    
1204
    }
1205
    /* reserve some stack space */
1206
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
1207
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
1208
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) & 
1209
        ~(TCG_TARGET_STACK_ALIGN - 1);
1210
    stack_addend = frame_size - push_size;
1211
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);
1212

    
1213
    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */
1214
    
1215
    /* TB epilogue */
1216
    tb_ret_addr = s->code_ptr;
1217
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
1218
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
1219
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
1220
    }
1221
    tcg_out8(s, 0xc3); /* ret */
1222
}
1223

    
1224
/* Operand constraints for every opcode this backend implements, one
 * constraint string per operand (outputs first), terminated by the
 * { -1 } sentinel.  "r" is any register, "i" any immediate, and a
 * digit ("0", "1") means the operand must alias the same-numbered
 * output.  The remaining letters ("a", "c", "d", "e", "Z", "L") are
 * target-specific classes parsed elsewhere in this file --
 * NOTE(review): presumably %rax, %rcx, %rdx, 32-bit sign-extended
 * immediate, zero, and qemu_ld/st-safe register respectively; confirm
 * against the constraint parser, which is outside this view.
 */
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    /* 32-bit moves, loads and stores. */
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    /* 32-bit arithmetic; destination must alias the first source. */
    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    /* 32-bit shifts; a register count must live in %cl ("c"). */
    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    /* 64-bit moves, loads and stores. */
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    /* 64-bit arithmetic. */
    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    /* 64-bit shifts. */
    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },
    { INDEX_op_rotl_i64, { "r", "0", "ci" } },
    { INDEX_op_rotr_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    /* In-place unary operations. */
    { INDEX_op_bswap_i32, { "r", "0" } },
    { INDEX_op_bswap_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },
    { INDEX_op_not_i64, { "r", "0" } },

    /* Sign extensions. */
    { INDEX_op_ext8s_i32, { "r", "r"} },
    { INDEX_op_ext16s_i32, { "r", "r"} },
    { INDEX_op_ext8s_i64, { "r", "r"} },
    { INDEX_op_ext16s_i64, { "r", "r"} },
    { INDEX_op_ext32s_i64, { "r", "r"} },

    /* Guest memory accesses. */
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    { -1 },
};
1320

    
1321
/* One-time initialisation of the x86_64 backend: sanity-check the
   CPUTLBEntry size assumption, then describe the register file
   (available, call-clobbered and reserved registers) to the register
   allocator and register the operand-constraint table. */
void tcg_target_init(TCGContext *s)
{
    int clobber_mask;

    /* fail safe: NOTE(review) presumably the qemu_ld/st fast path
       indexes the TLB by shifting with CPU_TLB_ENTRY_BITS, so the two
       must agree -- confirm in tcg_out_qemu_ld/st. */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    /* All sixteen general-purpose registers can hold both i32 and i64
       values. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);

    /* Registers not preserved across calls by the host calling
       convention. */
    clobber_mask = (1 << TCG_REG_RDI) |
                   (1 << TCG_REG_RSI) |
                   (1 << TCG_REG_RDX) |
                   (1 << TCG_REG_RCX) |
                   (1 << TCG_REG_R8) |
                   (1 << TCG_REG_R9) |
                   (1 << TCG_REG_RAX) |
                   (1 << TCG_REG_R10) |
                   (1 << TCG_REG_R11);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0, clobber_mask);

    /* %rsp is the stack pointer and never allocatable. */
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}