/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_TCG_DEBUG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "dyngen-exec.h"        /* env */
#include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
#endif
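
/* On a 32 bit host each of the up to MAX_OPC_PARAM_IARGS (4) helper
   arguments may be a 64 bit value passed as two tcg_target_ulong halves,
   hence the eight parameter variant of helper_function above. */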

/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
void *tci_tb_ptr;
#endif

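/* Registers of the bytecode interpreter, indexed by TCGReg. */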
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

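/* On a 32 bit host a 64 bit value lives in a pair of interpreter registers;
   callers pass the indexes of the high and low halves separately. */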
#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

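/* Bytecode format (as produced by the TCI code generator): each operation
   starts with a one byte opcode and a one byte length, followed by its
   operands.  Register operands are single bytes (TCGReg indexes), constants
   and labels are stored in the host's native representation, and all of the
   readers below advance *tb_ptr past the data they read. */
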
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

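/* When the guest address is wider than a host register
   (TARGET_LONG_BITS > TCG_TARGET_REG_BITS), the address is encoded as two
   register operands, low half first. */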
/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

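/* A register index equal to TCG_CONST is a marker: it means that an
   immediate value follows in the bytecode instead of a register number. */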
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static target_ulong tci_read_label(uint8_t **tb_ptr)
{
    target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

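/* Evaluate a TCG comparison of two 32 bit values; the signed conditions
   compare i0/i1, the unsigned ones compare u0/u1. */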
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

/* Interpret pseudo code in tb. */
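/* This is the main dispatch loop of the interpreter: it fetches one
   bytecode operation (opcode byte, length byte, operands), executes it and
   repeats until an exit_tb operation or a branch out of the block is
   reached.  The return value is the argument of the exit_tb operation that
   ended execution. */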
unsigned long tcg_qemu_tb_exec(CPUState *cpustate, uint8_t *tb_ptr)
{
    unsigned long next_tb = 0;

    env = cpustate;
    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
#if defined(GETPC)
        tci_tb_ptr = tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;
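        /* Each case below decodes its own operands; the assert at the
           bottom of the loop checks that exactly op_size bytes of
           bytecode were consumed. */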

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
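        /* Call a helper function.  The first operand is the helper address
           (a register or constant); the arguments are taken from fixed
           registers (R0..R3 on a 64 bit host, R0..R3 plus R5..R8 on a
           32 bit host) and the 64 bit result is written back to R0
           (R0/R1 on a 32 bit host). */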
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_jmp:
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
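        /* exit_tb returns its 64 bit immediate operand to the caller;
           goto_tb adds a signed 32 bit displacement to the bytecode
           pointer, i.e. it jumps relative to the current position. */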
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
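        /* Guest memory accesses: with CONFIG_SOFTMMU they go through the
           __ld/__st MMU helpers using the mem_index operand; in user mode
           the guest address is used directly as a host address, offset by
           GUEST_BASE, with tswap* applying the guest byte order. */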
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = __ldb_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = __ldb_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = __ldw_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = __ldw_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = __ldl_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = __ldl_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = __ldl_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = __ldq_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stb_mmu(taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stw_mmu(taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stl_mmu(taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stq_mmu(taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}