/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
const char *tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};

int tcg_target_reg_alloc_order[] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_RAX,
    TCG_REG_R10,
    TCG_REG_R11,

    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};

const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX
};

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value)
{
    switch(type) {
    case R_X86_64_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_X86_64_32S:
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        value -= (long)code_ptr;
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}

/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}

/* parse target specific constraints */
int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'q':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'e':
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
        return 1;
    else
        return 0;
}

#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REX   0x400 /* force rex usage */

static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
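
/* Emit the opcode byte along with its optional prefixes.  The opc value
   carries the P_* flags defined above: P_REXW requests a 64-bit operand
   size (REX.W), P_EXT inserts the 0x0f two-byte opcode escape, and P_REX
   forces an empty REX prefix even when no extension bit is needed (for
   instance so that byte operations can reach %sil/%dil instead of
   %ah/%dh).  The REX byte itself is 0100WRXB: W comes from the flags,
   while R, X and B extend the reg, index and rm/base fields when the
   corresponding register number is 8 or above. */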
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) |
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    if (rex || (opc & P_REX)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc);
}

static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
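
/* Emit opcode plus a ModRM byte (and SIB byte/displacement as needed) for
   a memory operand.  The mod field is chosen from the displacement size:
   no displacement when offset == 0 (except for %rbp/%r13, which always
   need an explicit one), an 8-bit displacement when it fits in int8_t,
   and a 32-bit displacement otherwise.  A SIB byte is emitted whenever
   the base is %rsp/%r12, since that rm encoding slot is reserved for SIB.
   With a negative rm, the offset is encoded either %rip-relative
   (relative to the end of the instruction, including any trailing
   immediate bytes) or as a 32-bit absolute address through a base-less
   SIB byte. */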
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}

#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;
    if (rm == -1)
        tcg_abort();
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40;
    } else if (offset == (int32_t)offset) {
        mod = 0x80;
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x04 | (rm & 7));
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif

static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
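
/* Load a constant into a register, picking the shortest usable encoding:
   xor reg,reg for zero, a 32-bit mov immediate (which zero-extends to 64
   bits) when the value fits in 32 unsigned bits or the type is I32, the
   sign-extending mov $imm32 form for values that fit in int32_t, and the
   full 10-byte movabs otherwise. */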
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x8b | P_REXW, ret, arg1, arg2); /* movq */
}

static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x89 | P_REXW, arg, arg1, arg2); /* movq */
}

static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
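
/* 64-bit arithmetic with an immediate operand.  Immediates that fit in
   int8_t or int32_t use the sign-extending 0x83/0x81 forms with REX.W.
   For AND with a value that fits in 32 unsigned bits, the REX.W prefix
   is dropped: the 32-bit operation zero-extends its result, which is
   exactly what masking with such a constant requires.  Any other
   immediate aborts, so callers must keep their constants in range. */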
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (val == (int32_t)val) {
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}

void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0)
        tgen_arithi64(s, ARITH_ADD, reg, val);
}
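
/* Emit a (conditional) jump to a TCG label.  opc is a JCC_* condition or
   -1 (JCC_JMP) for an unconditional jump.  Labels whose address is
   already known get the short rel8 form when the target is in range and
   the rel32 form otherwise; forward references always use the rel32 form
   together with an R_386_PC32 relocation so the displacement can be
   patched once the label value is resolved. */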
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);
            else
                tcg_out8(s, 0x70 + opc);
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
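
/* Compare-and-branch.  When the second operand is the constant zero and
   the condition only needs the sign/zero flags (EQ, NE, LT, GE), a
   "test r,r" is emitted instead of a compare; otherwise a cmp against
   the immediate or register operand is emitted, followed by the jcc
   obtained through tcg_cond_to_jcc. */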
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    int c;
    if (const_arg2) {
        if (arg2 == 0) {
            /* use test */
            switch(cond) {
            case TCG_COND_EQ:
                c = JCC_JE;
                break;
            case TCG_COND_NE:
                c = JCC_JNE;
                break;
            case TCG_COND_LT:
                c = JCC_JS;
                break;
            case TCG_COND_GE:
                c = JCC_JNS;
                break;
            default:
                goto do_cmpi;
            }
            /* test r, r */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
            tcg_out_jxx(s, c, label_index);
        } else {
        do_cmpi:
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
            tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
        }
    } else {
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
        tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
    }
}

#if defined(CONFIG_SOFTMMU)
extern void __ldb_mmu(void);
extern void __ldw_mmu(void);
extern void __ldl_mmu(void);
extern void __ldq_mmu(void);

extern void __stb_mmu(void);
extern void __stw_mmu(void);
extern void __stl_mmu(void);
extern void __stq_mmu(void);

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
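
/* With CONFIG_SOFTMMU, guest memory accesses go through an inline TLB
   lookup: the guest address is copied into %rsi (r1) and %rdi (r0), r1
   is turned into an offset into env->tlb_table[mem_index] (shift by
   TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS, then mask), and r0 is masked
   down to the page address plus the access-size bits, so unaligned
   accesses also take the slow path.  If the tag stored in the TLB entry
   matches, the host address is formed by adding the entry's addend and
   the access is performed inline; otherwise the corresponding
   __ld*_mmu/__st*_mmu helper is called. */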
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* XXX: move that code to the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
    case 1:
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, 0);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, 0);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, 0);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* XXX: move that code to the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REX, data_reg, r0, 0);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, r1 */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, 0);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
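
/* Translate one TCG opcode into host code.  Note the two goto_tb
   strategies below: when tb_jmp_offset is set, a direct "jmp rel32" is
   emitted with a zero displacement and its offset recorded so the jump
   can be patched when TBs are chained; otherwise an indirect jump
   through the tb_next array is used. */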
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
        tcg_out8(s, 0xc3); /* ret */
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next +
                                                   args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i64:
        /* movsbq */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i64:
        /* movswq */
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        /* movslq */
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
        break;

    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REX, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
        break;

    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith32;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith32;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith32;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith32:
        if (const_args[2]) {
            tgen_arithi32(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;

    case INDEX_op_sub_i64:
        c = ARITH_SUB;
        goto gen_arith64;
    case INDEX_op_and_i64:
        c = ARITH_AND;
        goto gen_arith64;
    case INDEX_op_or_i64:
        c = ARITH_OR;
        goto gen_arith64;
    case INDEX_op_xor_i64:
        c = ARITH_XOR;
        goto gen_arith64;
    case INDEX_op_add_i64:
        c = ARITH_ADD;
    gen_arith64:
        if (const_args[2]) {
            tgen_arithi64(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
        }
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mul_i64:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
        }
        break;
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);
        break;
    case INDEX_op_div2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
        break;
    case INDEX_op_divu2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
        break;

    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;

    case INDEX_op_shl_i64:
        c = SHIFT_SHL;
    gen_shift64:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
        }
        break;
    case INDEX_op_shr_i64:
        c = SHIFT_SHR;
        goto gen_shift64;
    case INDEX_op_sar_i64:
        c = SHIFT_SAR;
        goto gen_shift64;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], P_REXW);
        break;

    case INDEX_op_bswap_i32:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
        break;
    case INDEX_op_bswap_i64:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
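
/* Operand constraints for each opcode, using the letters handled in
   target_parse_constraint above: "r" is any of the 16 registers, "q" the
   first four, "a"/"b"/"c"/"d"/"S"/"D" pin a specific register, "L" is a
   register usable by qemu_ld/st (excluding %rsi/%rdi, which the inline
   TLB lookup clobbers), "e" accepts a sign-extended 32-bit immediate and
   "Z" a zero-extended one, "i" any immediate, and a digit ties the
   operand to the same register as the corresponding output. */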
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap_i32, { "r", "0" } },
    { INDEX_op_bswap_i64, { "r", "0" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    { -1 },
};

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_RDI) |
                     (1 << TCG_REG_RSI) |
                     (1 << TCG_REG_RDX) |
                     (1 << TCG_REG_RCX) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_RAX) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);
    /* XXX: will be suppressed when proper global TB entry code is
       generated */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RBX);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RBP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}