Statistics
| Branch: | Revision:

root / tcg / tci / tcg-target.c @ 4699ca6d

History | View | Annotate | Download (26.6 kB)

1
/*
2
 * Tiny Code Generator for QEMU
3
 *
4
 * Copyright (c) 2009, 2011 Stefan Weil
5
 *
6
 * Permission is hereby granted, free of charge, to any person obtaining a copy
7
 * of this software and associated documentation files (the "Software"), to deal
8
 * in the Software without restriction, including without limitation the rights
9
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10
 * copies of the Software, and to permit persons to whom the Software is
11
 * furnished to do so, subject to the following conditions:
12
 *
13
 * The above copyright notice and this permission notice shall be included in
14
 * all copies or substantial portions of the Software.
15
 *
16
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22
 * THE SOFTWARE.
23
 */
24

    
25
/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Single bit n. */
#define BIT(n) (1 << (n))

/* Bitfield n...m (in 32 bit value), n = highest bit, m = lowest bit.
   Arguments are fully parenthesized so that expressions such as
   BITS(a + b, m) expand correctly. */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))

/* Used for function call generation. */
#define TCG_REG_CALL_STACK              TCG_REG_R4
#define TCG_TARGET_STACK_ALIGN          16
#define TCG_TARGET_CALL_STACK_OFFSET    0

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
63

    
64
/* Operand constraint table for every TCG opcode this backend accepts.
 * Per-operand constraint letters (see the macros above):
 *   R    - any register
 *   RI   - register or immediate
 *   "0", "1" - operand must alias output operand 0 / 1
 *   L, S - address operands of qemu_ld / qemu_st (one or two registers,
 *          depending on TARGET_LONG_BITS vs TCG_TARGET_REG_BITS)
 * The table is terminated by the { -1 } sentinel.
 */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};
281

    
282
/* Preferred register allocation order.  R4 is excluded because it is
   reserved as the call-stack pointer (TCG_REG_CALL_STACK above). */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};
304

    
305
/* The register lists below are sized for exactly 5 input arguments;
   fail the build if the core changes that limit. */
#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

/* Registers used to pass input arguments to helper calls.  R4 is
   skipped (call-stack pointer); 32-bit hosts provide twice as many
   registers because 64-bit arguments occupy a register pair. */
static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};
331

    
332
/* Registers holding a helper call's return value; a 64-bit result on a
   32-bit host uses the R0/R1 pair. */
static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};
338

    
339
#ifndef NDEBUG
/* Register names, used only for debug output. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif
379

    
380
/* Patch a previously emitted relocation: store the resolved absolute
   address at code_ptr.  tcg_out_reloc (via tci_out_label) only ever
   emits full-native-word relocations with a zero addend, which the
   asserts enforce. */
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    tcg_target_long *reloc_addr = (tcg_target_long *)code_ptr;

    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *reloc_addr = value;
}
389

    
390
/* Parse target specific constraints. */
391
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
392
{
393
    const char *ct_str = *pct_str;
394
    switch (ct_str[0]) {
395
    case 'r':
396
    case 'L':                   /* qemu_ld constraint */
397
    case 'S':                   /* qemu_st constraint */
398
        ct->ct |= TCG_CT_REG;
399
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
400
        break;
401
    default:
402
        return -1;
403
    }
404
    ct_str++;
405
    *pct_str = ct_str;
406
    return 0;
407
}
408

    
409
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *d = &tcg_op_defs[opc];

    fprintf(stderr, "TCG %s %u, %u, %u\n",
            d->name, d->nb_oargs, d->nb_iargs, d->nb_cargs);
}
#endif
418

    
419
/* Write value (native size). */
420
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
421
{
422
    *(tcg_target_ulong *)s->code_ptr = v;
423
    s->code_ptr += sizeof(tcg_target_ulong);
424
}
425

    
426
/* Write 64 bit value. */
427
static void tcg_out64(TCGContext *s, uint64_t v)
428
{
429
    *(uint64_t *)s->code_ptr = v;
430
    s->code_ptr += sizeof(v);
431
}
432

    
433
/* Write opcode.  Emits the opcode byte followed by a zero placeholder
   byte; callers patch that second byte with the total instruction
   length once all operands have been emitted (old_code_ptr[1] = ...). */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
439

    
440
/* Write register.  Register numbers must fit the one-byte encoding. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}
446

    
447
/* Write register or constant (native size). */
448
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
449
{
450
    if (const_arg) {
451
        assert(const_arg == 1);
452
        tcg_out8(s, TCG_CONST);
453
        tcg_out_i(s, arg);
454
    } else {
455
        tcg_out_r(s, arg);
456
    }
457
}
458

    
459
/* Write register or constant (32 bit). */
460
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
461
{
462
    if (const_arg) {
463
        assert(const_arg == 1);
464
        tcg_out8(s, TCG_CONST);
465
        tcg_out32(s, arg);
466
    } else {
467
        tcg_out_r(s, arg);
468
    }
469
}
470

    
471
#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit), tagging immediates with
   TCG_CONST. */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (!const_arg) {
        tcg_out_r(s, arg);
        return;
    }
    assert(const_arg == 1);
    tcg_out8(s, TCG_CONST);
    tcg_out64(s, arg);
}
#endif
484

    
485
/* Write label. */
486
static void tci_out_label(TCGContext *s, TCGArg arg)
487
{
488
    TCGLabel *label = &s->labels[arg];
489
    if (label->has_value) {
490
        tcg_out_i(s, label->u.value);
491
        assert(label->u.value);
492
    } else {
493
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
494
        s->code_ptr += sizeof(tcg_target_ulong);
495
    }
496
}
497

    
498
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
499
                       tcg_target_long arg2)
500
{
501
    uint8_t *old_code_ptr = s->code_ptr;
502
    if (type == TCG_TYPE_I32) {
503
        tcg_out_op_t(s, INDEX_op_ld_i32);
504
        tcg_out_r(s, ret);
505
        tcg_out_r(s, arg1);
506
        tcg_out32(s, arg2);
507
    } else {
508
        assert(type == TCG_TYPE_I64);
509
#if TCG_TARGET_REG_BITS == 64
510
        tcg_out_op_t(s, INDEX_op_ld_i64);
511
        tcg_out_r(s, ret);
512
        tcg_out_r(s, arg1);
513
        assert(arg2 == (int32_t)arg2);
514
        tcg_out32(s, arg2);
515
#else
516
        TODO();
517
#endif
518
    }
519
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
520
}
521

    
522
/* Emit a register-to-register move at the native register width. */
static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *insn_start = s->code_ptr;

    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    insn_start[1] = s->code_ptr - insn_start;
}
535

    
536
static void tcg_out_movi(TCGContext *s, TCGType type,
537
                         TCGReg t0, tcg_target_long arg)
538
{
539
    uint8_t *old_code_ptr = s->code_ptr;
540
    uint32_t arg32 = arg;
541
    if (type == TCG_TYPE_I32 || arg == arg32) {
542
        tcg_out_op_t(s, INDEX_op_movi_i32);
543
        tcg_out_r(s, t0);
544
        tcg_out32(s, arg32);
545
    } else {
546
        assert(type == TCG_TYPE_I64);
547
#if TCG_TARGET_REG_BITS == 64
548
        tcg_out_op_t(s, INDEX_op_movi_i64);
549
        tcg_out_r(s, t0);
550
        tcg_out64(s, arg);
551
#else
552
        TODO();
553
#endif
554
    }
555
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
556
}
557

    
558
/* Emit bytecode for one TCG opcode: the opcode byte, a length
   placeholder byte (patched at the end), then the operands in the
   order the interpreter expects.  Registers are single bytes;
   immediates go through tcg_out_ri*() which tags them with TCG_CONST;
   branch targets go through tci_out_label(). */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            /* Record where the jump target will be patched in, and
               reserve 32 bits for it. */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    /* All host loads/stores share one encoding: dest/src register,
       base register, 32-bit signed offset. */
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    /* 32-bit binary ops: output register, two register-or-immediate
       inputs. */
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        /* Deposit position and length are encoded as single bytes. */
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        /* TODO: Implementation of rotl_i64, rotr_i64 missing in tci.c. */
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    /* Unary ops: output register, input register. */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, args[3]);
        break;
    /* Guest loads/stores: value register(s), address register(s),
       then the softmmu memory index when CONFIG_SOFTMMU is set. */
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    /* Patch the instruction's size byte (see tcg_out_op_t). */
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
851

    
852
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
853
                       tcg_target_long arg2)
854
{
855
    uint8_t *old_code_ptr = s->code_ptr;
856
    if (type == TCG_TYPE_I32) {
857
        tcg_out_op_t(s, INDEX_op_st_i32);
858
        tcg_out_r(s, arg);
859
        tcg_out_r(s, arg1);
860
        tcg_out32(s, arg2);
861
    } else {
862
        assert(type == TCG_TYPE_I64);
863
#if TCG_TARGET_REG_BITS == 64
864
        tcg_out_op_t(s, INDEX_op_st_i64);
865
        tcg_out_r(s, arg);
866
        tcg_out_r(s, arg1);
867
        tcg_out32(s, arg2);
868
#else
869
        TODO();
870
#endif
871
    }
872
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
873
}
874

    
875
/* Test if a constant matches the constraint. */
876
static int tcg_target_const_match(tcg_target_long val,
877
                                  const TCGArgConstraint *arg_ct)
878
{
879
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
880
    return arg_ct->ct & TCG_CT_CONST;
881
}
882

    
883
/* One-time backend initialization: register sets, reserved registers,
   operand constraint table, and the temporary-buffer frame. */
static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    /* Allow the log level to be set via the DEBUG_TCG environment
       variable (parsed as a number, any base). */
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Reserve the call-stack register so the allocator never uses it. */
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);
    tcg_set_frame(s, TCG_AREG0, offsetof(CPUArchState, temp_buf),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}
910

    
911
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* Intentionally empty: this backend emits bytecode rather than
       native code, so presumably entry/exit is handled by the
       interpreter's dispatch loop (see tci.c) — no native prologue or
       epilogue is needed here. */
}