Statistics
| Branch: | Revision:

root / tcg / x86_64 / tcg-target.c @ 016b2b28

History | View | Annotate | Download (40.5 kB)

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25
#ifndef NDEBUG
/* Human-readable names for the 16 x86-64 integer registers, indexed by
   TCG register number; used only for debug/disassembly output. */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
#endif
/* Register allocation preference order.  Callee-saved registers (rbp, rbx,
   r12-r15 in the SysV AMD64 ABI) come first so allocated values survive
   helper calls; caller-saved registers and the argument/return registers
   (rdi, rsi, rdx, rcx, r8, r9, rax) come last. */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R9,
    TCG_REG_R8,
    TCG_REG_RCX,
    TCG_REG_RDX,
    TCG_REG_RSI,
    TCG_REG_RDI,
    TCG_REG_RAX,
};
/* Integer function-argument registers, in SysV AMD64 calling-convention
   order (first argument in rdi, second in rsi, ...). */
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
/* Integer function-return registers (SysV AMD64: rax, then rdx for the
   high half of a two-register return). */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX, 
    TCG_REG_RDX 
};
/* Code address that INDEX_op_exit_tb jumps back to (see tcg_out_op).
   NOTE(review): initialized elsewhere, presumably by the prologue
   generation code — not visible in this chunk. */
static uint8_t *tb_ret_addr;
/* Apply relocation TYPE at CODE_PTR, patching in VALUE + ADDEND.
   Aborts if the result does not fit in the relocation's 32-bit field. */
static void patch_reloc(uint8_t *code_ptr, int type, 
                        tcg_target_long value, tcg_target_long addend)
{
    tcg_target_long v = value + addend;

    switch (type) {
    case R_X86_64_32:
        /* 32-bit zero-extended absolute */
        if (v != (uint32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    case R_X86_64_32S:
        /* 32-bit sign-extended absolute */
        if (v != (int32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    case R_386_PC32:
        /* 32-bit pc-relative */
        v -= (long)code_ptr;
        if (v != (int32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    default:
        tcg_abort();
    }
}
/* Maximum number of registers used for input function arguments.
   The SysV AMD64 convention provides six integer argument registers
   regardless of FLAGS. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
/* parse target specific constraints */
113
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
114
{
115
    const char *ct_str;
116

    
117
    ct_str = *pct_str;
118
    switch(ct_str[0]) {
119
    case 'a':
120
        ct->ct |= TCG_CT_REG;
121
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
122
        break;
123
    case 'b':
124
        ct->ct |= TCG_CT_REG;
125
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
126
        break;
127
    case 'c':
128
        ct->ct |= TCG_CT_REG;
129
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
130
        break;
131
    case 'd':
132
        ct->ct |= TCG_CT_REG;
133
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
134
        break;
135
    case 'S':
136
        ct->ct |= TCG_CT_REG;
137
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
138
        break;
139
    case 'D':
140
        ct->ct |= TCG_CT_REG;
141
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
142
        break;
143
    case 'q':
144
        ct->ct |= TCG_CT_REG;
145
        tcg_regset_set32(ct->u.regs, 0, 0xf);
146
        break;
147
    case 'r':
148
        ct->ct |= TCG_CT_REG;
149
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
150
        break;
151
    case 'L': /* qemu_ld/st constraint */
152
        ct->ct |= TCG_CT_REG;
153
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
154
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
155
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
156
        break;
157
    case 'e':
158
        ct->ct |= TCG_CT_CONST_S32;
159
        break;
160
    case 'Z':
161
        ct->ct |= TCG_CT_CONST_U32;
162
        break;
163
    default:
164
        return -1;
165
    }
166
    ct_str++;
167
    *pct_str = ct_str;
168
    return 0;
169
}
170

    
171
/* test if a constant matches the constraint */
172
static inline int tcg_target_const_match(tcg_target_long val,
173
                                         const TCGArgConstraint *arg_ct)
174
{
175
    int ct;
176
    ct = arg_ct->ct;
177
    if (ct & TCG_CT_CONST)
178
        return 1;
179
    else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
180
        return 1;
181
    else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
182
        return 1;
183
    else
184
        return 0;
185
}
186

    
187
/* x86 one-byte ALU group: the value is encoded in the opcode or the
   ModRM reg field (the /digit of the immediate forms). */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* x86 shift/rotate group (ModRM reg field of the shift opcodes) */
#define SHIFT_ROL 0
#define SHIFT_ROR 1
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* x86 condition codes for Jcc (0x70+cc / 0x0f 0x80+cc);
   JCC_JMP requests an unconditional jump instead. */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

/* flag bits or'ed into the opcode word passed to tcg_out_opc() */
#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REXB  0x400 /* force rex use for byte registers */
/* Map TCG comparison conditions to x86 condition codes (signed conditions
   to JL/JGE/JLE/JG, unsigned ones to JB/JAE/JBE/JA). */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
/* Emit the REX prefix (if needed), the optional 0x0f escape, and the
   opcode byte.  R feeds rex.r, RM feeds rex.b, X feeds rex.x; the P_*
   flags are carried in the high bits of OPC. */
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    /* rex.w (bit 3) from P_REXW (opc bit 9); rex.r (bit 2) from bit 3
       of r; rex.x (bit 1) from bit 3 of x; rex.b (bit 0) from bit 3
       of rm. */
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) | 
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    /* P_REXB forces an (empty) REX prefix so sil/dil/bpl/spl are
       addressable instead of ah/ch/dh/bh. */
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc & 0xff);
}
/* Emit opcode OPC with a register-to-register ModRM byte (mod = 3):
   reg field = R, r/m field = RM. */
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
/* Emit opcode OPC with a memory-operand ModRM byte addressing
   OFFSET(%rm), choosing the shortest displacement encoding.
   With rm < 0 the operand is absolute: rip-relative when the target is
   within +/-2GB of the instruction end, else a 32-bit absolute SIB form. */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm, 
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        /* displacement is relative to the end of the instruction:
           opcode + modrm + disp32 (5 bytes) + trailing immediates */
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            /* absolute 32-bit address via SIB byte with no base/index */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        /* mod = 0, no displacement (rbp/r13 as base always need one) */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* rsp/r12 as base require a SIB byte */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        /* mod = 1, 8-bit signed displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        /* mod = 2, 32-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
/* Emit opcode OPC addressing OFFSET(%rm, %index, 1<<shift) via a SIB
   byte; index == -1 means no index register.  Picks the smallest of
   0/8/32-bit displacement, aborting if OFFSET needs 64 bits. */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm, 
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;
    if (rm == -1)
        tcg_abort();
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;        /* no displacement */
    } else if (offset == (int8_t)offset) {
        mod = 0x40;     /* disp8 */
    } else if (offset == (int32_t)offset) {
        mod = 0x80;     /* disp32 */
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* rsp/r12 as base require a SIB byte even without an index */
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x04 | (rm & 7));
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    /* trailing displacement, if any */
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif
/* 64-bit register-to-register move: movq %arg, %ret. */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
/* Load immediate ARG into register RET, choosing the shortest encoding:
   xor for zero, 32-bit movl (which zero-extends) for unsigned-32 values,
   sign-extended movq $imm32, else the full 10-byte movabs. */
static inline void tcg_out_movi(TCGContext *s, TCGType type, 
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        /* movl $imm32, ret */
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        /* movq $simm32, ret (sign-extended) */
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        /* movabs $imm64, ret */
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
/* Emit a call (CALL != 0) or jump to TARGET: a 5-byte rel32 form when the
   displacement fits in 32 bits, otherwise an indirect call/jump through
   scratch register R10. */
static void tcg_out_goto(TCGContext *s, int call, uint8_t *target)
{
    int32_t disp;

    disp = target - s->code_ptr - 5;
    /* comparing the truncated int32_t against the full difference
       detects whether the displacement fits */
    if (disp == (target - s->code_ptr - 5)) {
        tcg_out8(s, call ? 0xe8 : 0xe9);  /* call/jmp rel32 */
        tcg_out32(s, disp);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R10, (tcg_target_long) target);
        tcg_out_modrm(s, 0xff, call ? 2 : 4, TCG_REG_R10); /* call/jmp *%r10 */
    }
}
/* Load from memory: ret = *(TYPE *)(arg1 + arg2). */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x8b | P_REXW, ret, arg1, arg2); /* movq */
}
/* Store to memory: *(TYPE *)(arg1 + arg2) = arg. */
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x89 | P_REXW, arg, arg1, arg2); /* movq */
}
/* Emit a 32-bit ALU operation C (ARITH_*) with immediate VAL on register
   R0, picking special shorter encodings where possible. */
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1)) {
        /* inc */
        tcg_out_modrm(s, 0xff, 0, r0);
    } else if ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1)) {
        /* dec */
        tcg_out_modrm(s, 0xff, 1, r0);
    } else if (val == (int8_t)val) {
        /* ALU op with sign-extended 8-bit immediate */
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* and $0xff -> movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* and $0xffff -> movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        /* generic ALU op with 32-bit immediate */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
/* Emit a 64-bit ALU operation C (ARITH_*) with immediate VAL on register
   R0.  x86-64 has no 64-bit immediates for these opcodes, so aborts when
   VAL fits neither a sign-extended imm32 nor one of the special AND
   forms. */
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1)) {
        /* inc */
        tcg_out_modrm(s, 0xff | P_REXW, 0, r0);
    } else if ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1)) {
        /* dec */
        tcg_out_modrm(s, 0xff | P_REXW, 1, r0);
    } else if (val == (int8_t)val) {
        /* ALU op with sign-extended 8-bit immediate */
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (val == (int32_t)val) {
        /* 64-bit ALU op with sign-extended 32-bit immediate */
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        /* 32-bit AND also clears the high 32 bits */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
/* Add constant VAL to 64-bit register REG; emits nothing when VAL is 0. */
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val == 0) {
        return;
    }
    tgen_arithi64(s, ARITH_ADD, reg, val);
}
/* Emit a jump to LABEL_INDEX: unconditional when OPC == JCC_JMP (-1),
   else conditional with condition code OPC.  Resolved labels use the
   short 8-bit form when in range; unresolved (forward) labels always get
   the 32-bit form plus a R_386_PC32 relocation. */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];
    
    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;  /* displacement relative to end of 2-byte insn */
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);        /* jmp rel8 */
            else
                tcg_out8(s, 0x70 + opc);  /* jcc rel8 */
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);        /* jmp rel32 (5 bytes) */
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);        /* jcc rel32 (6 bytes) */
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        /* forward reference: emit opcode, then relocate the 32-bit
           displacement once the label is resolved */
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
/* Emit a compare of ARG1 against ARG2 (immediate when CONST_ARG2) followed
   by a conditional jump to LABEL_INDEX; REXW selects 32- vs 64-bit
   comparison.  Comparing against 0 uses the shorter test r,r form. */
static void tcg_out_brcond(TCGContext *s, int cond, 
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
        } else {
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
        }
    } else {
        /* cmp arg2, arg1 */
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Slow-path load helpers for TLB misses, indexed by log2(access size). */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

/* Slow-path store helpers for TLB misses, indexed by log2(access size). */
static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
/* Emit code for a guest memory load.  args = { data_reg, addr_reg,
   mem_index }; opc = log2(access size) in bits 0-1, bit 2 set for a
   sign-extending load.  With CONFIG_SOFTMMU this emits an inline TLB
   lookup (fast path) with a call to qemu_ld_helpers[] on miss; without
   it, guest memory is addressed directly at GUEST_BASE + addr. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
    int32_t offset;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;   /* log2 of the access size */

    /* scratch registers; also the first two helper-argument registers */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* r1 = r0 = guest address */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);
 
    /* r1 = TLB index bits of the address */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS); 
    
    /* r0 = page-aligned address; low bits force a miss on an access
       that would cross the alignment of its size */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    
    /* r1 = byte offset of the TLB entry */
    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* r1 = &env->tlb_table[mem_index][index].addr_read */
    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);
    
    /* restore the full guest address into r0 */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);
    
    /* je label1 — TLB hit; displacement patched below */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: call the slow-path helper (address already in rdi) */
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out_goto(s, 1, qemu_ld_helpers[s_bits]);

    /* extend/copy the helper result from rax into data_reg */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
        /* movzbq */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1:
        /* movzwq */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 — skip the fast path; displacement patched below */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;
    
    /* label1: patch the 8-bit je displacement to land here */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* TLB hit: r0 = host address = guest address + entry's addend */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) - 
                         offsetof(CPUTLBEntry, addr_read));
    offset = 0;
#else
    if (GUEST_BASE == (int32_t)GUEST_BASE) {
        /* GUEST_BASE fits the displacement field: address directly */
        r0 = addr_reg;
        offset = GUEST_BASE;
    } else {
        offset = 0;
        /* movq $GUEST_BASE, r0 */
        tcg_out_opc(s, (0xb8 + (r0 & 7)) | P_REXW, 0, r0, 0);
        tcg_out32(s, GUEST_BASE);
        tcg_out32(s, GUEST_BASE >> 32);
        /* addq addr_reg, r0 */
        tcg_out_modrm(s, 0x01 | P_REXW, addr_reg, r0);
    }
#endif    

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* the actual load from host address offset(r0), with byte swapping
       when guest and host endianness differ */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, offset);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, offset);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, offset);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66); 
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* load zero-extended, byte-swap, then sign-extend */
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, offset);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66); 
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, offset);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, offset);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, offset);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, offset);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, offset);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the 8-bit jmp displacement to land here */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
/* Emit code for a guest memory store.  args = { data_reg, addr_reg,
   mem_index }; opc = log2(access size).  Mirrors tcg_out_qemu_ld: with
   CONFIG_SOFTMMU an inline TLB lookup falls back to qemu_st_helpers[]
   on miss; otherwise guest memory is addressed at GUEST_BASE + addr. */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
    int32_t offset;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;   /* log2 of the access size (no sign-extend bit here) */

    /* scratch registers; also the first two helper-argument registers */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* r1 = r0 = guest address */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);
 
    /* r1 = TLB index bits of the address */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS); 
    
    /* r0 = page-aligned address; low bits force a miss on an access
       that would cross the alignment of its size */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    
    /* r1 = byte offset of the TLB entry */
    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* r1 = &env->tlb_table[mem_index][index].addr_write */
    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);
    
    /* restore the full guest address into r0 */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);
    
    /* je label1 — TLB hit; displacement patched below */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: marshal the data value into rsi (second helper arg),
       truncated to the access size */
    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out_goto(s, 1, qemu_st_helpers[s_bits]);

    /* jmp label2 — skip the fast path; displacement patched below */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;
    
    /* label1: patch the 8-bit je displacement to land here */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* TLB hit: r0 = host address = guest address + entry's addend */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) - 
                         offsetof(CPUTLBEntry, addr_write));
    offset = 0;
#else
    if (GUEST_BASE == (int32_t)GUEST_BASE) {
        /* GUEST_BASE fits the displacement field: address directly */
        r0 = addr_reg;
        offset = GUEST_BASE;
    } else {
        offset = 0;
        /* movq $GUEST_BASE, r0 */
        tcg_out_opc(s, (0xb8 + (r0 & 7)) | P_REXW, 0, r0, 0);
        tcg_out32(s, GUEST_BASE);
        tcg_out32(s, GUEST_BASE >> 32);
        /* addq addr_reg, r0 */
        tcg_out_modrm(s, 0x01 | P_REXW, addr_reg, r0);
    }
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* the actual store to host address offset(r0); byte-swapped values
       go through scratch register r1 so data_reg is preserved */
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, offset);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, offset);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, offset);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, offset);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the 8-bit jmp displacement to land here */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
883
                              const int *const_args)
884
{
885
    int c;
886
    
887
    switch(opc) {
888
    case INDEX_op_exit_tb:
889
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
890
        tcg_out_goto(s, 0, tb_ret_addr);
891
        break;
892
    case INDEX_op_goto_tb:
893
        if (s->tb_jmp_offset) {
894
            /* direct jump method */
895
            tcg_out8(s, 0xe9); /* jmp im */
896
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
897
            tcg_out32(s, 0);
898
        } else {
899
            /* indirect jump method */
900
            /* jmp Ev */
901
            tcg_out_modrm_offset(s, 0xff, 4, -1, 
902
                                 (tcg_target_long)(s->tb_next + 
903
                                                   args[0]));
904
        }
905
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
906
        break;
907
    case INDEX_op_call:
908
        if (const_args[0]) {
909
            tcg_out_goto(s, 1, (void *) args[0]);
910
        } else {
911
            tcg_out_modrm(s, 0xff, 2, args[0]);
912
        }
913
        break;
914
    case INDEX_op_jmp:
915
        if (const_args[0]) {
916
            tcg_out_goto(s, 0, (void *) args[0]);
917
        } else {
918
            tcg_out_modrm(s, 0xff, 4, args[0]);
919
        }
920
        break;
921
    case INDEX_op_br:
922
        tcg_out_jxx(s, JCC_JMP, args[0]);
923
        break;
924
    case INDEX_op_movi_i32:
925
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
926
        break;
927
    case INDEX_op_movi_i64:
928
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
929
        break;
930
    case INDEX_op_ld8u_i32:
931
    case INDEX_op_ld8u_i64:
932
        /* movzbl */
933
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
934
        break;
935
    case INDEX_op_ld8s_i32:
936
        /* movsbl */
937
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
938
        break;
939
    case INDEX_op_ld8s_i64:
940
        /* movsbq */
941
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
942
        break;
943
    case INDEX_op_ld16u_i32:
944
    case INDEX_op_ld16u_i64:
945
        /* movzwl */
946
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
947
        break;
948
    case INDEX_op_ld16s_i32:
949
        /* movswl */
950
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
951
        break;
952
    case INDEX_op_ld16s_i64:
953
        /* movswq */
954
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
955
        break;
956
    case INDEX_op_ld_i32:
957
    case INDEX_op_ld32u_i64:
958
        /* movl */
959
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
960
        break;
961
    case INDEX_op_ld32s_i64:
962
        /* movslq */
963
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
964
        break;
965
    case INDEX_op_ld_i64:
966
        /* movq */
967
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
968
        break;
969
        
970
    case INDEX_op_st8_i32:
971
    case INDEX_op_st8_i64:
972
        /* movb */
973
        tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
974
        break;
975
    case INDEX_op_st16_i32:
976
    case INDEX_op_st16_i64:
977
        /* movw */
978
        tcg_out8(s, 0x66);
979
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
980
        break;
981
    case INDEX_op_st_i32:
982
    case INDEX_op_st32_i64:
983
        /* movl */
984
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
985
        break;
986
    case INDEX_op_st_i64:
987
        /* movq */
988
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
989
        break;
990

    
991
    case INDEX_op_sub_i32:
992
        c = ARITH_SUB;
993
        goto gen_arith32;
994
    case INDEX_op_and_i32:
995
        c = ARITH_AND;
996
        goto gen_arith32;
997
    case INDEX_op_or_i32:
998
        c = ARITH_OR;
999
        goto gen_arith32;
1000
    case INDEX_op_xor_i32:
1001
        c = ARITH_XOR;
1002
        goto gen_arith32;
1003
    case INDEX_op_add_i32:
1004
        c = ARITH_ADD;
1005
    gen_arith32:
1006
        if (const_args[2]) {
1007
            tgen_arithi32(s, c, args[0], args[2]);
1008
        } else {
1009
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
1010
        }
1011
        break;
1012

    
1013
    case INDEX_op_sub_i64:
1014
        c = ARITH_SUB;
1015
        goto gen_arith64;
1016
    case INDEX_op_and_i64:
1017
        c = ARITH_AND;
1018
        goto gen_arith64;
1019
    case INDEX_op_or_i64:
1020
        c = ARITH_OR;
1021
        goto gen_arith64;
1022
    case INDEX_op_xor_i64:
1023
        c = ARITH_XOR;
1024
        goto gen_arith64;
1025
    case INDEX_op_add_i64:
1026
        c = ARITH_ADD;
1027
    gen_arith64:
1028
        if (const_args[2]) {
1029
            tgen_arithi64(s, c, args[0], args[2]);
1030
        } else {
1031
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
1032
        }
1033
        break;
1034

    
1035
    case INDEX_op_mul_i32:
1036
        if (const_args[2]) {
1037
            int32_t val;
1038
            val = args[2];
1039
            if (val == (int8_t)val) {
1040
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
1041
                tcg_out8(s, val);
1042
            } else {
1043
                tcg_out_modrm(s, 0x69, args[0], args[0]);
1044
                tcg_out32(s, val);
1045
            }
1046
        } else {
1047
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
1048
        }
1049
        break;
1050
    case INDEX_op_mul_i64:
1051
        if (const_args[2]) {
1052
            int32_t val;
1053
            val = args[2];
1054
            if (val == (int8_t)val) {
1055
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
1056
                tcg_out8(s, val);
1057
            } else {
1058
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
1059
                tcg_out32(s, val);
1060
            }
1061
        } else {
1062
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
1063
        }
1064
        break;
1065
    case INDEX_op_div2_i32:
1066
        tcg_out_modrm(s, 0xf7, 7, args[4]);
1067
        break;
1068
    case INDEX_op_divu2_i32:
1069
        tcg_out_modrm(s, 0xf7, 6, args[4]);
1070
        break;
1071
    case INDEX_op_div2_i64:
1072
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
1073
        break;
1074
    case INDEX_op_divu2_i64:
1075
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
1076
        break;
1077

    
1078
    case INDEX_op_shl_i32:
1079
        c = SHIFT_SHL;
1080
    gen_shift32:
1081
        if (const_args[2]) {
1082
            if (args[2] == 1) {
1083
                tcg_out_modrm(s, 0xd1, c, args[0]);
1084
            } else {
1085
                tcg_out_modrm(s, 0xc1, c, args[0]);
1086
                tcg_out8(s, args[2]);
1087
            }
1088
        } else {
1089
            tcg_out_modrm(s, 0xd3, c, args[0]);
1090
        }
1091
        break;
1092
    case INDEX_op_shr_i32:
1093
        c = SHIFT_SHR;
1094
        goto gen_shift32;
1095
    case INDEX_op_sar_i32:
1096
        c = SHIFT_SAR;
1097
        goto gen_shift32;
1098
    case INDEX_op_rotl_i32:
1099
        c = SHIFT_ROL;
1100
        goto gen_shift32;
1101
    case INDEX_op_rotr_i32:
1102
        c = SHIFT_ROR;
1103
        goto gen_shift32;
1104

    
1105
    case INDEX_op_shl_i64:
1106
        c = SHIFT_SHL;
1107
    gen_shift64:
1108
        if (const_args[2]) {
1109
            if (args[2] == 1) {
1110
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
1111
            } else {
1112
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
1113
                tcg_out8(s, args[2]);
1114
            }
1115
        } else {
1116
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
1117
        }
1118
        break;
1119
    case INDEX_op_shr_i64:
1120
        c = SHIFT_SHR;
1121
        goto gen_shift64;
1122
    case INDEX_op_sar_i64:
1123
        c = SHIFT_SAR;
1124
        goto gen_shift64;
1125
    case INDEX_op_rotl_i64:
1126
        c = SHIFT_ROL;
1127
        goto gen_shift64;
1128
    case INDEX_op_rotr_i64:
1129
        c = SHIFT_ROR;
1130
        goto gen_shift64;
1131

    
1132
    case INDEX_op_brcond_i32:
1133
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], 
1134
                       args[3], 0);
1135
        break;
1136
    case INDEX_op_brcond_i64:
1137
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], 
1138
                       args[3], P_REXW);
1139
        break;
1140

    
1141
    case INDEX_op_bswap16_i32:
1142
    case INDEX_op_bswap16_i64:
1143
        tcg_out8(s, 0x66);
1144
        tcg_out_modrm(s, 0xc1, SHIFT_ROL, args[0]);
1145
        tcg_out8(s, 8);
1146
        break;
1147
    case INDEX_op_bswap32_i32:
1148
    case INDEX_op_bswap32_i64:
1149
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
1150
        break;
1151
    case INDEX_op_bswap64_i64:
1152
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
1153
        break;
1154

    
1155
    case INDEX_op_neg_i32:
1156
        tcg_out_modrm(s, 0xf7, 3, args[0]);
1157
        break;
1158
    case INDEX_op_neg_i64:
1159
        tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
1160
        break;
1161

    
1162
    case INDEX_op_not_i32:
1163
        tcg_out_modrm(s, 0xf7, 2, args[0]);
1164
        break;
1165
    case INDEX_op_not_i64:
1166
        tcg_out_modrm(s, 0xf7 | P_REXW, 2, args[0]);
1167
        break;
1168

    
1169
    case INDEX_op_ext8s_i32:
1170
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXB, args[0], args[1]);
1171
        break;
1172
    case INDEX_op_ext16s_i32:
1173
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
1174
        break;
1175
    case INDEX_op_ext8s_i64:
1176
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
1177
        break;
1178
    case INDEX_op_ext16s_i64:
1179
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
1180
        break;
1181
    case INDEX_op_ext32s_i64:
1182
        tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
1183
        break;
1184
    case INDEX_op_ext8u_i32:
1185
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, args[0], args[1]);
1186
        break;
1187
    case INDEX_op_ext16u_i32:
1188
        tcg_out_modrm(s, 0xb7 | P_EXT, args[0], args[1]);
1189
        break;
1190
    case INDEX_op_ext8u_i64:
1191
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, args[0], args[1]);
1192
        break;
1193
    case INDEX_op_ext16u_i64:
1194
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, args[0], args[1]);
1195
        break;
1196
    case INDEX_op_ext32u_i64:
1197
        tcg_out_modrm(s, 0x8b, args[0], args[1]);
1198
        break;
1199

    
1200
    case INDEX_op_qemu_ld8u:
1201
        tcg_out_qemu_ld(s, args, 0);
1202
        break;
1203
    case INDEX_op_qemu_ld8s:
1204
        tcg_out_qemu_ld(s, args, 0 | 4);
1205
        break;
1206
    case INDEX_op_qemu_ld16u:
1207
        tcg_out_qemu_ld(s, args, 1);
1208
        break;
1209
    case INDEX_op_qemu_ld16s:
1210
        tcg_out_qemu_ld(s, args, 1 | 4);
1211
        break;
1212
    case INDEX_op_qemu_ld32u:
1213
        tcg_out_qemu_ld(s, args, 2);
1214
        break;
1215
    case INDEX_op_qemu_ld32s:
1216
        tcg_out_qemu_ld(s, args, 2 | 4);
1217
        break;
1218
    case INDEX_op_qemu_ld64:
1219
        tcg_out_qemu_ld(s, args, 3);
1220
        break;
1221
        
1222
    case INDEX_op_qemu_st8:
1223
        tcg_out_qemu_st(s, args, 0);
1224
        break;
1225
    case INDEX_op_qemu_st16:
1226
        tcg_out_qemu_st(s, args, 1);
1227
        break;
1228
    case INDEX_op_qemu_st32:
1229
        tcg_out_qemu_st(s, args, 2);
1230
        break;
1231
    case INDEX_op_qemu_st64:
1232
        tcg_out_qemu_st(s, args, 3);
1233
        break;
1234

    
1235
    default:
1236
        tcg_abort();
1237
    }
1238
}
1239

    
1240
static int tcg_target_callee_save_regs[] = {
1241
    TCG_REG_RBP,
1242
    TCG_REG_RBX,
1243
    TCG_REG_R12,
1244
    TCG_REG_R13,
1245
    /*    TCG_REG_R14, */ /* currently used for the global env, so no
1246
                             need to save */
1247
    TCG_REG_R15,
1248
};
1249

    
1250
static inline void tcg_out_push(TCGContext *s, int reg)
1251
{
1252
    tcg_out_opc(s, (0x50 + (reg & 7)), 0, reg, 0);
1253
}
1254

    
1255
/* Emit "pop reg" (0x58+r; tcg_out_opc adds REX.B for r8-r15). */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x58 + (reg & 7)), 0, reg, 0);
}
/* Generate global QEMU prologue and epilogue code */
1261
void tcg_target_qemu_prologue(TCGContext *s)
1262
{
1263
    int i, frame_size, push_size, stack_addend;
1264

    
1265
    /* TB prologue */
1266
    /* save all callee saved registers */
1267
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
1268
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
1269

    
1270
    }
1271
    /* reserve some stack space */
1272
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
1273
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
1274
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) & 
1275
        ~(TCG_TARGET_STACK_ALIGN - 1);
1276
    stack_addend = frame_size - push_size;
1277
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);
1278

    
1279
    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */
1280
    
1281
    /* TB epilogue */
1282
    tb_ret_addr = s->code_ptr;
1283
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
1284
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
1285
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
1286
    }
1287
    tcg_out8(s, 0xc3); /* ret */
1288
}
1289

    
1290
static const TCGTargetOpDef x86_64_op_defs[] = {
1291
    { INDEX_op_exit_tb, { } },
1292
    { INDEX_op_goto_tb, { } },
1293
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
1294
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
1295
    { INDEX_op_br, { } },
1296

    
1297
    { INDEX_op_mov_i32, { "r", "r" } },
1298
    { INDEX_op_movi_i32, { "r" } },
1299
    { INDEX_op_ld8u_i32, { "r", "r" } },
1300
    { INDEX_op_ld8s_i32, { "r", "r" } },
1301
    { INDEX_op_ld16u_i32, { "r", "r" } },
1302
    { INDEX_op_ld16s_i32, { "r", "r" } },
1303
    { INDEX_op_ld_i32, { "r", "r" } },
1304
    { INDEX_op_st8_i32, { "r", "r" } },
1305
    { INDEX_op_st16_i32, { "r", "r" } },
1306
    { INDEX_op_st_i32, { "r", "r" } },
1307

    
1308
    { INDEX_op_add_i32, { "r", "0", "ri" } },
1309
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
1310
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
1311
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
1312
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
1313
    { INDEX_op_and_i32, { "r", "0", "ri" } },
1314
    { INDEX_op_or_i32, { "r", "0", "ri" } },
1315
    { INDEX_op_xor_i32, { "r", "0", "ri" } },
1316

    
1317
    { INDEX_op_shl_i32, { "r", "0", "ci" } },
1318
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
1319
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
1320
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
1321
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },
1322

    
1323
    { INDEX_op_brcond_i32, { "r", "ri" } },
1324

    
1325
    { INDEX_op_mov_i64, { "r", "r" } },
1326
    { INDEX_op_movi_i64, { "r" } },
1327
    { INDEX_op_ld8u_i64, { "r", "r" } },
1328
    { INDEX_op_ld8s_i64, { "r", "r" } },
1329
    { INDEX_op_ld16u_i64, { "r", "r" } },
1330
    { INDEX_op_ld16s_i64, { "r", "r" } },
1331
    { INDEX_op_ld32u_i64, { "r", "r" } },
1332
    { INDEX_op_ld32s_i64, { "r", "r" } },
1333
    { INDEX_op_ld_i64, { "r", "r" } },
1334
    { INDEX_op_st8_i64, { "r", "r" } },
1335
    { INDEX_op_st16_i64, { "r", "r" } },
1336
    { INDEX_op_st32_i64, { "r", "r" } },
1337
    { INDEX_op_st_i64, { "r", "r" } },
1338

    
1339
    { INDEX_op_add_i64, { "r", "0", "re" } },
1340
    { INDEX_op_mul_i64, { "r", "0", "re" } },
1341
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
1342
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
1343
    { INDEX_op_sub_i64, { "r", "0", "re" } },
1344
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
1345
    { INDEX_op_or_i64, { "r", "0", "re" } },
1346
    { INDEX_op_xor_i64, { "r", "0", "re" } },
1347

    
1348
    { INDEX_op_shl_i64, { "r", "0", "ci" } },
1349
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
1350
    { INDEX_op_sar_i64, { "r", "0", "ci" } },
1351
    { INDEX_op_rotl_i64, { "r", "0", "ci" } },
1352
    { INDEX_op_rotr_i64, { "r", "0", "ci" } },
1353

    
1354
    { INDEX_op_brcond_i64, { "r", "re" } },
1355

    
1356
    { INDEX_op_bswap16_i32, { "r", "0" } },
1357
    { INDEX_op_bswap16_i64, { "r", "0" } },
1358
    { INDEX_op_bswap32_i32, { "r", "0" } },
1359
    { INDEX_op_bswap32_i64, { "r", "0" } },
1360
    { INDEX_op_bswap64_i64, { "r", "0" } },
1361

    
1362
    { INDEX_op_neg_i32, { "r", "0" } },
1363
    { INDEX_op_neg_i64, { "r", "0" } },
1364

    
1365
    { INDEX_op_not_i32, { "r", "0" } },
1366
    { INDEX_op_not_i64, { "r", "0" } },
1367

    
1368
    { INDEX_op_ext8s_i32, { "r", "r"} },
1369
    { INDEX_op_ext16s_i32, { "r", "r"} },
1370
    { INDEX_op_ext8s_i64, { "r", "r"} },
1371
    { INDEX_op_ext16s_i64, { "r", "r"} },
1372
    { INDEX_op_ext32s_i64, { "r", "r"} },
1373
    { INDEX_op_ext8u_i32, { "r", "r"} },
1374
    { INDEX_op_ext16u_i32, { "r", "r"} },
1375
    { INDEX_op_ext8u_i64, { "r", "r"} },
1376
    { INDEX_op_ext16u_i64, { "r", "r"} },
1377
    { INDEX_op_ext32u_i64, { "r", "r"} },
1378

    
1379
    { INDEX_op_qemu_ld8u, { "r", "L" } },
1380
    { INDEX_op_qemu_ld8s, { "r", "L" } },
1381
    { INDEX_op_qemu_ld16u, { "r", "L" } },
1382
    { INDEX_op_qemu_ld16s, { "r", "L" } },
1383
    { INDEX_op_qemu_ld32u, { "r", "L" } },
1384
    { INDEX_op_qemu_ld32s, { "r", "L" } },
1385
    { INDEX_op_qemu_ld64, { "r", "L" } },
1386

    
1387
    { INDEX_op_qemu_st8, { "L", "L" } },
1388
    { INDEX_op_qemu_st16, { "L", "L" } },
1389
    { INDEX_op_qemu_st32, { "L", "L" } },
1390
    { INDEX_op_qemu_st64, { "L", "L" } },
1391

    
1392
    { -1 },
1393
};
1394

    
1395
/* One-time backend initialization: sanity-check the TLB entry layout,
 * declare which host registers are usable and which are call-clobbered
 * (System V x86-64 ABI), reserve %rsp, and register the constraint
 * table above with the TCG core. */
void tcg_target_init(TCGContext *s)
{
    /* fail safe: the softmmu TLB lookup code assumes this exact size */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    /* all 16 GPRs are available for both 32- and 64-bit values */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_RDI) |
                     (1 << TCG_REG_RSI) |
                     (1 << TCG_REG_RDX) |
                     (1 << TCG_REG_RCX) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_RAX) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}