tcg/tci/tcg-target.c @ 3cf246f0

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg-be-null.h"

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
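/* For example, BITS(7, 4) evaluates to 0x000000f0 (bits 7..4 set):
   0xffffffffU << 24 == 0xff000000, >> 28 == 0x0000000f, << 4 == 0xf0.
   This assumes 0 <= m <= n <= 31. */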

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
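
/* R is a plain register operand, RI a register or immediate constant.
   R64 covers a 64 bit value, which needs two registers on a 32 bit host.
   L and S are the qemu_ld/qemu_st address constraints and likewise double
   up when the guest address is wider than a host register; see
   target_parse_constraint below, where all three letters currently map
   to the full register set. */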

/* TODO: documentation. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(uint8_t *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *(tcg_target_long *)code_ptr = value;
}

/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    *(tcg_target_ulong *)s->code_ptr = v;
    s->code_ptr += sizeof(tcg_target_ulong);
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
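    /* Reserve a length byte; every emitter patches it afterwards with
       old_code_ptr[1] = s->code_ptr - old_code_ptr, so each bytecode
       instruction starts with (opcode, total size in bytes). */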
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
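
/* An "ri" operand is thus either a single register byte or a TCG_CONST
   marker byte followed by an immediate of the given width; the 32 and
   64 bit variants below differ only in the immediate size. */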

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
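        /* Forward reference: record a relocation and leave room for the
           address, which patch_reloc fills in once the label is bound. */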
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
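
/* The arg == arg32 test means 64 bit constants whose high half is zero
   are encoded with the shorter movi_i32; this is only correct if the
   interpreter zero-extends movi_i32 results on 64 bit hosts, which the
   guard presumably relies on. */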

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
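            /* Emit a 32 bit placeholder; it is rewritten with the real
               jump target when translation blocks are chained, which is
               what the recorded tb_jmp_offset is for. */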
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
773
    case INDEX_op_qemu_ld8s:
774
    case INDEX_op_qemu_ld16u:
775
    case INDEX_op_qemu_ld16s:
776
    case INDEX_op_qemu_ld32:
777
#if TCG_TARGET_REG_BITS == 64
778
    case INDEX_op_qemu_ld32s:
779
    case INDEX_op_qemu_ld32u:
780
#endif
781
        tcg_out_r(s, *args++);
782
        tcg_out_r(s, *args++);
783
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
784
        tcg_out_r(s, *args++);
785
#endif
786
#ifdef CONFIG_SOFTMMU
787
        tcg_out_i(s, *args);
788
#endif
789
        break;
790
    case INDEX_op_qemu_ld64:
791
        tcg_out_r(s, *args++);
792
#if TCG_TARGET_REG_BITS == 32
793
        tcg_out_r(s, *args++);
794
#endif
795
        tcg_out_r(s, *args++);
796
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
797
        tcg_out_r(s, *args++);
798
#endif
799
#ifdef CONFIG_SOFTMMU
800
        tcg_out_i(s, *args);
801
#endif
802
        break;
803
    case INDEX_op_qemu_st8:
804
    case INDEX_op_qemu_st16:
805
    case INDEX_op_qemu_st32:
806
        tcg_out_r(s, *args++);
807
        tcg_out_r(s, *args++);
808
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
809
        tcg_out_r(s, *args++);
810
#endif
811
#ifdef CONFIG_SOFTMMU
812
        tcg_out_i(s, *args);
813
#endif
814
        break;
815
    case INDEX_op_qemu_st64:
816
        tcg_out_r(s, *args++);
817
#if TCG_TARGET_REG_BITS == 32
818
        tcg_out_r(s, *args++);
819
#endif
820
        tcg_out_r(s, *args++);
821
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
822
        tcg_out_r(s, *args++);
823
#endif
824
#ifdef CONFIG_SOFTMMU
825
        tcg_out_i(s, *args);
826
#endif
827
        break;
828
    case INDEX_op_end:
829
        TODO();
830
        break;
831
    default:
832
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
833
        tcg_abort();
834
    }
835
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
836
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments.  */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
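    /* The interpreter in tci.c executes the generated bytecode directly,
       so no native prologue or epilogue code is needed; this function is
       intentionally empty. */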
}