target-m68k/translate.c @ 351326a6

1
/*
2
 *  m68k translation
3
 *
4
 *  Copyright (c) 2005-2007 CodeSourcery
5
 *  Written by Paul Brook
6
 *
7
 * This library is free software; you can redistribute it and/or
8
 * modify it under the terms of the GNU Lesser General Public
9
 * License as published by the Free Software Foundation; either
10
 * version 2 of the License, or (at your option) any later version.
11
 *
12
 * This library is distributed in the hope that it will be useful,
13
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
 * General Public License for more details.
16
 *
17
 * You should have received a copy of the GNU Lesser General Public
18
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25

    
26
#include "config.h"
27
#include "cpu.h"
28
#include "exec-all.h"
29
#include "disas.h"
30
#include "tcg-op.h"
31
#include "qemu-log.h"
32

    
33
#include "helpers.h"
34
#define GEN_HELPER 1
35
#include "helpers.h"
36

    
37
//#define DEBUG_DISPATCH 1
38

    
39
/* Fake floating point.  */
40
#define tcg_gen_mov_f64 tcg_gen_mov_i64
41
#define tcg_gen_qemu_ldf64 tcg_gen_qemu_ld64
42
#define tcg_gen_qemu_stf64 tcg_gen_qemu_st64
43

    
44
#define DEFO32(name, offset) static TCGv QREG_##name;
45
#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
46
#define DEFF64(name, offset) static TCGv_i64 QREG_##name;
47
#include "qregs.def"
48
#undef DEFO32
49
#undef DEFO64
50
#undef DEFF64
51

    
52
static TCGv_ptr cpu_env;
53

    
54
static char cpu_reg_names[3*8*3 + 5*4];
55
static TCGv cpu_dregs[8];
56
static TCGv cpu_aregs[8];
57
static TCGv_i64 cpu_fregs[8];
58
static TCGv_i64 cpu_macc[4];
59

    
60
#define DREG(insn, pos) cpu_dregs[((insn) >> (pos)) & 7]
61
#define AREG(insn, pos) cpu_aregs[((insn) >> (pos)) & 7]
62
#define FREG(insn, pos) cpu_fregs[((insn) >> (pos)) & 7]
63
#define MACREG(acc) cpu_macc[acc]
64
#define QREG_SP cpu_aregs[7]
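
/* The DREG/AREG/FREG macros extract a 3-bit register field from an
   instruction (or extension) word at bit position POS and map it onto the
   corresponding TCG global.  Illustrative example: for insn = 0xD240
   (add.w %d0,%d1) the destination field sits in bits 11:9, so
   DREG(insn, 9) selects ((0xD240 >> 9) & 7) == 1, i.e. cpu_dregs[1] (D1).  */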
65

    
66
static TCGv NULL_QREG;
67
#define IS_NULL_QREG(t) (TCGV_EQUAL(t, NULL_QREG))
68
/* Used to distinguish stores from bad addressing modes.  */
69
static TCGv store_dummy;
70

    
71
#include "gen-icount.h"
72

    
73
void m68k_tcg_init(void)
74
{
75
    char *p;
76
    int i;
77

    
78
#define DEFO32(name,  offset) QREG_##name = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, offset), #name);
79
#define DEFO64(name,  offset) QREG_##name = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, offset), #name);
80
#define DEFF64(name,  offset) DEFO64(name, offset)
81
#include "qregs.def"
82
#undef DEFO32
83
#undef DEFO64
84
#undef DEFF64
85

    
86
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
87

    
88
    p = cpu_reg_names;
89
    for (i = 0; i < 8; i++) {
90
        sprintf(p, "D%d", i);
91
        cpu_dregs[i] = tcg_global_mem_new(TCG_AREG0,
92
                                          offsetof(CPUM68KState, dregs[i]), p);
93
        p += 3;
94
        sprintf(p, "A%d", i);
95
        cpu_aregs[i] = tcg_global_mem_new(TCG_AREG0,
96
                                          offsetof(CPUM68KState, aregs[i]), p);
97
        p += 3;
98
        sprintf(p, "F%d", i);
99
        cpu_fregs[i] = tcg_global_mem_new_i64(TCG_AREG0,
100
                                          offsetof(CPUM68KState, fregs[i]), p);
101
        p += 3;
102
    }
103
    for (i = 0; i < 4; i++) {
104
        sprintf(p, "ACC%d", i);
105
        cpu_macc[i] = tcg_global_mem_new_i64(TCG_AREG0,
106
                                         offsetof(CPUM68KState, macc[i]), p);
107
        p += 5;
108
    }
109

    
110
    NULL_QREG = tcg_global_mem_new(TCG_AREG0, -4, "NULL");
111
    store_dummy = tcg_global_mem_new(TCG_AREG0, -8, "NULL");
112

    
113
#define GEN_HELPER 2
114
#include "helpers.h"
115
}
116

    
117
static inline void qemu_assert(int cond, const char *msg)
118
{
119
    if (!cond) {
120
        fprintf (stderr, "badness: %s\n", msg);
121
        abort();
122
    }
123
}
124

    
125
/* internal defines */
126
typedef struct DisasContext {
127
    CPUM68KState *env;
128
    target_ulong insn_pc; /* Start of the current instruction.  */
129
    target_ulong pc;
130
    int is_jmp;
131
    int cc_op;
132
    int user;
133
    uint32_t fpcr;
134
    struct TranslationBlock *tb;
135
    int singlestep_enabled;
136
    int is_mem;
137
    TCGv_i64 mactmp;
138
    int done_mac;
139
} DisasContext;
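
/* DisasContext carries the per-instruction translation state threaded
   through every disas_* handler: the guest PC of the current instruction
   (insn_pc) and the fetch pointer (pc), the lazily evaluated
   condition-code state (cc_op), the privilege level, and bookkeeping for
   the MAC unit and single-stepping.  */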
140

    
141
#define DISAS_JUMP_NEXT 4
142

    
143
#if defined(CONFIG_USER_ONLY)
144
#define IS_USER(s) 1
145
#else
146
#define IS_USER(s) s->user
147
#endif
148

    
149
/* XXX: move that elsewhere */
150
/* ??? Fix exceptions.  */
151
static void *gen_throws_exception;
152
#define gen_last_qop NULL
153

    
154
#define OS_BYTE 0
155
#define OS_WORD 1
156
#define OS_LONG 2
157
#define OS_SINGLE 4
158
#define OS_DOUBLE 5
159

    
160
typedef void (*disas_proc)(DisasContext *, uint16_t);
161

    
162
#ifdef DEBUG_DISPATCH
163
#define DISAS_INSN(name) \
164
  static void real_disas_##name (DisasContext *s, uint16_t insn); \
165
  static void disas_##name (DisasContext *s, uint16_t insn) { \
166
    qemu_log("Dispatch " #name "\n"); \
167
    real_disas_##name(s, insn); } \
168
  static void real_disas_##name (DisasContext *s, uint16_t insn)
169
#else
170
#define DISAS_INSN(name) \
171
  static void disas_##name (DisasContext *s, uint16_t insn)
172
#endif
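
/* DISAS_INSN declares one translator per instruction pattern; when
   DEBUG_DISPATCH is defined each handler is wrapped so the dispatch is
   logged before the real implementation runs.  */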
173

    
174
/* Generate a load from the specified address.  Narrow values are
175
   sign or zero extended to full register width, as selected by SIGN.  */
176
static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
177
{
178
    TCGv tmp;
179
    int index = IS_USER(s);
180
    s->is_mem = 1;
181
    tmp = tcg_temp_new_i32();
182
    switch(opsize) {
183
    case OS_BYTE:
184
        if (sign)
185
            tcg_gen_qemu_ld8s(tmp, addr, index);
186
        else
187
            tcg_gen_qemu_ld8u(tmp, addr, index);
188
        break;
189
    case OS_WORD:
190
        if (sign)
191
            tcg_gen_qemu_ld16s(tmp, addr, index);
192
        else
193
            tcg_gen_qemu_ld16u(tmp, addr, index);
194
        break;
195
    case OS_LONG:
196
    case OS_SINGLE:
197
        tcg_gen_qemu_ld32u(tmp, addr, index);
198
        break;
199
    default:
200
        qemu_assert(0, "bad load size");
201
    }
202
    gen_throws_exception = gen_last_qop;
203
    return tmp;
204
}
205

    
206
static inline TCGv_i64 gen_load64(DisasContext * s, TCGv addr)
207
{
208
    TCGv_i64 tmp;
209
    int index = IS_USER(s);
210
    s->is_mem = 1;
211
    tmp = tcg_temp_new_i64();
212
    tcg_gen_qemu_ldf64(tmp, addr, index);
213
    gen_throws_exception = gen_last_qop;
214
    return tmp;
215
}
216

    
217
/* Generate a store.  */
218
static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
219
{
220
    int index = IS_USER(s);
221
    s->is_mem = 1;
222
    switch(opsize) {
223
    case OS_BYTE:
224
        tcg_gen_qemu_st8(val, addr, index);
225
        break;
226
    case OS_WORD:
227
        tcg_gen_qemu_st16(val, addr, index);
228
        break;
229
    case OS_LONG:
230
    case OS_SINGLE:
231
        tcg_gen_qemu_st32(val, addr, index);
232
        break;
233
    default:
234
        qemu_assert(0, "bad store size");
235
    }
236
    gen_throws_exception = gen_last_qop;
237
}
238

    
239
static inline void gen_store64(DisasContext *s, TCGv addr, TCGv_i64 val)
240
{
241
    int index = IS_USER(s);
242
    s->is_mem = 1;
243
    tcg_gen_qemu_stf64(val, addr, index);
244
    gen_throws_exception = gen_last_qop;
245
}
246

    
247
typedef enum {
248
    EA_STORE,
249
    EA_LOADU,
250
    EA_LOADS
251
} ea_what;
252

    
253
/* Generate an unsigned load for EA_LOADU, a signed load for EA_LOADS,
254
   otherwise generate a store.  */
255
static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
256
                     ea_what what)
257
{
258
    if (what == EA_STORE) {
259
        gen_store(s, opsize, addr, val);
260
        return store_dummy;
261
    } else {
262
        return gen_load(s, opsize, addr, what == EA_LOADS);
263
    }
264
}
265

    
266
/* Read a 32-bit immediate constant.  */
267
static inline uint32_t read_im32(DisasContext *s)
268
{
269
    uint32_t im;
270
    im = ((uint32_t)lduw_code(s->pc)) << 16;
271
    s->pc += 2;
272
    im |= lduw_code(s->pc);
273
    s->pc += 2;
274
    return im;
275
}
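
/* Note: the 32-bit immediate is fetched as two 16-bit extension words,
   high word first, advancing s->pc by 4 in total.  */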
276

    
277
/* Calculate an address index.  */
278
static TCGv gen_addr_index(uint16_t ext, TCGv tmp)
279
{
280
    TCGv add;
281
    int scale;
282

    
283
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
284
    if ((ext & 0x800) == 0) {
285
        tcg_gen_ext16s_i32(tmp, add);
286
        add = tmp;
287
    }
288
    scale = (ext >> 9) & 3;
289
    if (scale != 0) {
290
        tcg_gen_shli_i32(tmp, add, scale);
291
        add = tmp;
292
    }
293
    return add;
294
}
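
/* In the extension word decoded above, bit 15 selects an address versus a
   data register, bits 14:12 give the register number, bit 11 chooses a
   sign-extended word versus a long index, and bits 10:9 encode the scale
   factor (a shift count of 0-3).  */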
295

    
296
/* Handle a base + index + displacement effective address.
297
   A NULL_QREG base means pc-relative.  */
298
static TCGv gen_lea_indexed(DisasContext *s, int opsize, TCGv base)
299
{
300
    uint32_t offset;
301
    uint16_t ext;
302
    TCGv add;
303
    TCGv tmp;
304
    uint32_t bd, od;
305

    
306
    offset = s->pc;
307
    ext = lduw_code(s->pc);
308
    s->pc += 2;
309

    
310
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
311
        return NULL_QREG;
312

    
313
    if (ext & 0x100) {
314
        /* full extension word format */
315
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
316
            return NULL_QREG;
317

    
318
        if ((ext & 0x30) > 0x10) {
319
            /* base displacement */
320
            if ((ext & 0x30) == 0x20) {
321
                bd = (int16_t)lduw_code(s->pc);
322
                s->pc += 2;
323
            } else {
324
                bd = read_im32(s);
325
            }
326
        } else {
327
            bd = 0;
328
        }
329
        tmp = tcg_temp_new();
330
        if ((ext & 0x44) == 0) {
331
            /* pre-index */
332
            add = gen_addr_index(ext, tmp);
333
        } else {
334
            add = NULL_QREG;
335
        }
336
        if ((ext & 0x80) == 0) {
337
            /* base not suppressed */
338
            if (IS_NULL_QREG(base)) {
339
                base = tcg_const_i32(offset + bd);
340
                bd = 0;
341
            }
342
            if (!IS_NULL_QREG(add)) {
343
                tcg_gen_add_i32(tmp, add, base);
344
                add = tmp;
345
            } else {
346
                add = base;
347
            }
348
        }
349
        if (!IS_NULL_QREG(add)) {
350
            if (bd != 0) {
351
                tcg_gen_addi_i32(tmp, add, bd);
352
                add = tmp;
353
            }
354
        } else {
355
            add = tcg_const_i32(bd);
356
        }
357
        if ((ext & 3) != 0) {
358
            /* memory indirect */
359
            base = gen_load(s, OS_LONG, add, 0);
360
            if ((ext & 0x44) == 4) {
361
                add = gen_addr_index(ext, tmp);
362
                tcg_gen_add_i32(tmp, add, base);
363
                add = tmp;
364
            } else {
365
                add = base;
366
            }
367
            if ((ext & 3) > 1) {
368
                /* outer displacement */
369
                if ((ext & 3) == 2) {
370
                    od = (int16_t)lduw_code(s->pc);
371
                    s->pc += 2;
372
                } else {
373
                    od = read_im32(s);
374
                }
375
            } else {
376
                od = 0;
377
            }
378
            if (od != 0) {
379
                tcg_gen_addi_i32(tmp, add, od);
380
                add = tmp;
381
            }
382
        }
383
    } else {
384
        /* brief extension word format */
385
        tmp = tcg_temp_new();
386
        add = gen_addr_index(ext, tmp);
387
        if (!IS_NULL_QREG(base)) {
388
            tcg_gen_add_i32(tmp, add, base);
389
            if ((int8_t)ext)
390
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
391
        } else {
392
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
393
        }
394
        add = tmp;
395
    }
396
    return add;
397
}
398

    
399
/* Update the CPU env CC_OP state.  */
400
static inline void gen_flush_cc_op(DisasContext *s)
401
{
402
    if (s->cc_op != CC_OP_DYNAMIC)
403
        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
404
}
405

    
406
/* Evaluate all the CC flags.  */
407
static inline void gen_flush_flags(DisasContext *s)
408
{
409
    if (s->cc_op == CC_OP_FLAGS)
410
        return;
411
    gen_flush_cc_op(s);
412
    gen_helper_flush_flags(cpu_env, QREG_CC_OP);
413
    s->cc_op = CC_OP_FLAGS;
414
}
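
/* Condition codes are evaluated lazily: s->cc_op records how QREG_CC_DEST
   and QREG_CC_SRC should be interpreted, and gen_flush_flags() converts
   that state into explicit CCF_* bits before they are examined.  */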
415

    
416
static void gen_logic_cc(DisasContext *s, TCGv val)
417
{
418
    tcg_gen_mov_i32(QREG_CC_DEST, val);
419
    s->cc_op = CC_OP_LOGIC;
420
}
421

    
422
static void gen_update_cc_add(TCGv dest, TCGv src)
423
{
424
    tcg_gen_mov_i32(QREG_CC_DEST, dest);
425
    tcg_gen_mov_i32(QREG_CC_SRC, src);
426
}
427

    
428
static inline int opsize_bytes(int opsize)
429
{
430
    switch (opsize) {
431
    case OS_BYTE: return 1;
432
    case OS_WORD: return 2;
433
    case OS_LONG: return 4;
434
    case OS_SINGLE: return 4;
435
    case OS_DOUBLE: return 8;
436
    default:
437
        qemu_assert(0, "bad operand size");
438
        return 0;
439
    }
440
}
441

    
442
/* Assign value to a register.  If the width is less than the register width
443
   only the low part of the register is set.  */
444
static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
445
{
446
    TCGv tmp;
447
    switch (opsize) {
448
    case OS_BYTE:
449
        tcg_gen_andi_i32(reg, reg, 0xffffff00);
450
        tmp = tcg_temp_new();
451
        tcg_gen_ext8u_i32(tmp, val);
452
        tcg_gen_or_i32(reg, reg, tmp);
453
        break;
454
    case OS_WORD:
455
        tcg_gen_andi_i32(reg, reg, 0xffff0000);
456
        tmp = tcg_temp_new();
457
        tcg_gen_ext16u_i32(tmp, val);
458
        tcg_gen_or_i32(reg, reg, tmp);
459
        break;
460
    case OS_LONG:
461
    case OS_SINGLE:
462
        tcg_gen_mov_i32(reg, val);
463
        break;
464
    default:
465
        qemu_assert(0, "Bad operand size");
466
        break;
467
    }
468
}
469

    
470
/* Sign or zero extend a value.  */
471
static inline TCGv gen_extend(TCGv val, int opsize, int sign)
472
{
473
    TCGv tmp;
474

    
475
    switch (opsize) {
476
    case OS_BYTE:
477
        tmp = tcg_temp_new();
478
        if (sign)
479
            tcg_gen_ext8s_i32(tmp, val);
480
        else
481
            tcg_gen_ext8u_i32(tmp, val);
482
        break;
483
    case OS_WORD:
484
        tmp = tcg_temp_new();
485
        if (sign)
486
            tcg_gen_ext16s_i32(tmp, val);
487
        else
488
            tcg_gen_ext16u_i32(tmp, val);
489
        break;
490
    case OS_LONG:
491
    case OS_SINGLE:
492
        tmp = val;
493
        break;
494
    default:
495
        qemu_assert(0, "Bad operand size");
496
    }
497
    return tmp;
498
}
499

    
500
/* Generate code for an "effective address".  Does not adjust the base
501
   register for autoincrement addressing modes.  */
502
static TCGv gen_lea(DisasContext *s, uint16_t insn, int opsize)
503
{
504
    TCGv reg;
505
    TCGv tmp;
506
    uint16_t ext;
507
    uint32_t offset;
508

    
509
    switch ((insn >> 3) & 7) {
510
    case 0: /* Data register direct.  */
511
    case 1: /* Address register direct.  */
512
        return NULL_QREG;
513
    case 2: /* Indirect register */
514
    case 3: /* Indirect postincrement.  */
515
        return AREG(insn, 0);
516
    case 4: /* Indirect predecrement.  */
517
        reg = AREG(insn, 0);
518
        tmp = tcg_temp_new();
519
        tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
520
        return tmp;
521
    case 5: /* Indirect displacement.  */
522
        reg = AREG(insn, 0);
523
        tmp = tcg_temp_new();
524
        ext = lduw_code(s->pc);
525
        s->pc += 2;
526
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
527
        return tmp;
528
    case 6: /* Indirect index + displacement.  */
529
        reg = AREG(insn, 0);
530
        return gen_lea_indexed(s, opsize, reg);
531
    case 7: /* Other */
532
        switch (insn & 7) {
533
        case 0: /* Absolute short.  */
534
            offset = ldsw_code(s->pc);
535
            s->pc += 2;
536
            return tcg_const_i32(offset);
537
        case 1: /* Absolute long.  */
538
            offset = read_im32(s);
539
            return tcg_const_i32(offset);
540
        case 2: /* pc displacement  */
541
            offset = s->pc;
542
            offset += ldsw_code(s->pc);
543
            s->pc += 2;
544
            return tcg_const_i32(offset);
545
        case 3: /* pc index+displacement.  */
546
            return gen_lea_indexed(s, opsize, NULL_QREG);
547
        case 4: /* Immediate.  */
548
        default:
549
            return NULL_QREG;
550
        }
551
    }
552
    /* Should never happen.  */
553
    return NULL_QREG;
554
}
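
/* In gen_lea and gen_ea, bits 5:3 of the instruction select the addressing
   mode and bits 2:0 the register; mode 7 reuses the register field as a
   sub-mode for absolute, pc-relative and immediate operands.  */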
555

    
556
/* Helper function for gen_ea. Reuse the computed address between the
557
   read/write operands.  */
558
static inline TCGv gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
559
                              TCGv val, TCGv *addrp, ea_what what)
560
{
561
    TCGv tmp;
562

    
563
    if (addrp && what == EA_STORE) {
564
        tmp = *addrp;
565
    } else {
566
        tmp = gen_lea(s, insn, opsize);
567
        if (IS_NULL_QREG(tmp))
568
            return tmp;
569
        if (addrp)
570
            *addrp = tmp;
571
    }
572
    return gen_ldst(s, opsize, tmp, val, what);
573
}
574

    
575
/* Generate code to load/store a value into/from an EA.  If WHAT is
576
   EA_STORE this is a write, otherwise it is a read (EA_LOADS sign extends,
577
   EA_LOADU zero extends).  ADDRP is non-null for read/write operands.  */
578
static TCGv gen_ea(DisasContext *s, uint16_t insn, int opsize, TCGv val,
579
                   TCGv *addrp, ea_what what)
580
{
581
    TCGv reg;
582
    TCGv result;
583
    uint32_t offset;
584

    
585
    switch ((insn >> 3) & 7) {
586
    case 0: /* Data register direct.  */
587
        reg = DREG(insn, 0);
588
        if (what == EA_STORE) {
589
            gen_partset_reg(opsize, reg, val);
590
            return store_dummy;
591
        } else {
592
            return gen_extend(reg, opsize, what == EA_LOADS);
593
        }
594
    case 1: /* Address register direct.  */
595
        reg = AREG(insn, 0);
596
        if (what == EA_STORE) {
597
            tcg_gen_mov_i32(reg, val);
598
            return store_dummy;
599
        } else {
600
            return gen_extend(reg, opsize, what == EA_LOADS);
601
        }
602
    case 2: /* Indirect register */
603
        reg = AREG(insn, 0);
604
        return gen_ldst(s, opsize, reg, val, what);
605
    case 3: /* Indirect postincrement.  */
606
        reg = AREG(insn, 0);
607
        result = gen_ldst(s, opsize, reg, val, what);
608
        /* ??? This is not exception safe.  The instruction may still
609
           fault after this point.  */
610
        if (what == EA_STORE || !addrp)
611
            tcg_gen_addi_i32(reg, reg, opsize_bytes(opsize));
612
        return result;
613
    case 4: /* Indirect predecrement.  */
614
        {
615
            TCGv tmp;
616
            if (addrp && what == EA_STORE) {
617
                tmp = *addrp;
618
            } else {
619
                tmp = gen_lea(s, insn, opsize);
620
                if (IS_NULL_QREG(tmp))
621
                    return tmp;
622
                if (addrp)
623
                    *addrp = tmp;
624
            }
625
            result = gen_ldst(s, opsize, tmp, val, what);
626
            /* ??? This is not exception safe.  The instruction may still
627
               fault after this point.  */
628
            if (what == EA_STORE || !addrp) {
629
                reg = AREG(insn, 0);
630
                tcg_gen_mov_i32(reg, tmp);
631
            }
632
        }
633
        return result;
634
    case 5: /* Indirect displacement.  */
635
    case 6: /* Indirect index + displacement.  */
636
        return gen_ea_once(s, insn, opsize, val, addrp, what);
637
    case 7: /* Other */
638
        switch (insn & 7) {
639
        case 0: /* Absolute short.  */
640
        case 1: /* Absolute long.  */
641
        case 2: /* pc displacement  */
642
        case 3: /* pc index+displacement.  */
643
            return gen_ea_once(s, insn, opsize, val, addrp, what);
644
        case 4: /* Immediate.  */
645
            /* Sign extend values for consistency.  */
646
            switch (opsize) {
647
            case OS_BYTE:
648
                if (what == EA_LOADS)
649
                    offset = ldsb_code(s->pc + 1);
650
                else
651
                    offset = ldub_code(s->pc + 1);
652
                s->pc += 2;
653
                break;
654
            case OS_WORD:
655
                if (what == EA_LOADS)
656
                    offset = ldsw_code(s->pc);
657
                else
658
                    offset = lduw_code(s->pc);
659
                s->pc += 2;
660
                break;
661
            case OS_LONG:
662
                offset = read_im32(s);
663
                break;
664
            default:
665
                qemu_assert(0, "Bad immediate operand");
666
            }
667
            return tcg_const_i32(offset);
668
        default:
669
            return NULL_QREG;
670
        }
671
    }
672
    /* Should never happen.  */
673
    return NULL_QREG;
674
}
675

    
676
/* This generates a conditional branch, clobbering all temporaries.  */
677
static void gen_jmpcc(DisasContext *s, int cond, int l1)
678
{
679
    TCGv tmp;
680

    
681
    /* TODO: Optimize compare/branch pairs rather than always flushing
682
       flag state to CC_OP_FLAGS.  */
683
    gen_flush_flags(s);
684
    switch (cond) {
685
    case 0: /* T */
686
        tcg_gen_br(l1);
687
        break;
688
    case 1: /* F */
689
        break;
690
    case 2: /* HI (!C && !Z) */
691
        tmp = tcg_temp_new();
692
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
693
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
694
        break;
695
    case 3: /* LS (C || Z) */
696
        tmp = tcg_temp_new();
697
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
698
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
699
        break;
700
    case 4: /* CC (!C) */
701
        tmp = tcg_temp_new();
702
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
703
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
704
        break;
705
    case 5: /* CS (C) */
706
        tmp = tcg_temp_new();
707
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
708
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
709
        break;
710
    case 6: /* NE (!Z) */
711
        tmp = tcg_temp_new();
712
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
713
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
714
        break;
715
    case 7: /* EQ (Z) */
716
        tmp = tcg_temp_new();
717
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
718
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
719
        break;
720
    case 8: /* VC (!V) */
721
        tmp = tcg_temp_new();
722
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
723
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
724
        break;
725
    case 9: /* VS (V) */
726
        tmp = tcg_temp_new();
727
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
728
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
729
        break;
730
    case 10: /* PL (!N) */
731
        tmp = tcg_temp_new();
732
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
733
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
734
        break;
735
    case 11: /* MI (N) */
736
        tmp = tcg_temp_new();
737
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
738
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
739
        break;
740
    case 12: /* GE (!(N ^ V)) */
741
        tmp = tcg_temp_new();
742
        assert(CCF_V == (CCF_N >> 2));
743
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
744
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
745
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
746
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
747
        break;
748
    case 13: /* LT (N ^ V) */
749
        tmp = tcg_temp_new();
750
        assert(CCF_V == (CCF_N >> 2));
751
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
752
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
753
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
754
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
755
        break;
756
    case 14: /* GT (!(Z || (N ^ V))) */
757
        tmp = tcg_temp_new();
758
        assert(CCF_V == (CCF_N >> 2));
759
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
760
        tcg_gen_shri_i32(tmp, tmp, 2);
761
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
762
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
763
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
764
        break;
765
    case 15: /* LE (Z || (N ^ V)) */
766
        tmp = tcg_temp_new();
767
        assert(CCF_V == (CCF_N >> 2));
768
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
769
        tcg_gen_shri_i32(tmp, tmp, 2);
770
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
771
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
772
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
773
        break;
774
    default:
775
        /* Should never happen.  */
776
        abort();
777
    }
778
}
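
/* The COND values above follow the m68k condition-code encoding:
   0=T 1=F 2=HI 3=LS 4=CC 5=CS 6=NE 7=EQ 8=VC 9=VS 10=PL 11=MI
   12=GE 13=LT 14=GT 15=LE.  gen_jmpcc branches to L1 when the condition
   holds and falls through otherwise.  */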
779

    
780
DISAS_INSN(scc)
781
{
782
    int l1;
783
    int cond;
784
    TCGv reg;
785

    
786
    l1 = gen_new_label();
787
    cond = (insn >> 8) & 0xf;
788
    reg = DREG(insn, 0);
789
    tcg_gen_andi_i32(reg, reg, 0xffffff00);
790
    /* This is safe because we modify the reg directly, with no other values
791
       live.  */
792
    gen_jmpcc(s, cond ^ 1, l1);
793
    tcg_gen_ori_i32(reg, reg, 0xff);
794
    gen_set_label(l1);
795
}
796

    
797
/* Force a TB lookup after an instruction that changes the CPU state.  */
798
static void gen_lookup_tb(DisasContext *s)
799
{
800
    gen_flush_cc_op(s);
801
    tcg_gen_movi_i32(QREG_PC, s->pc);
802
    s->is_jmp = DISAS_UPDATE;
803
}
804

    
805
/* Generate a jump to an immediate address.  */
806
static void gen_jmp_im(DisasContext *s, uint32_t dest)
807
{
808
    gen_flush_cc_op(s);
809
    tcg_gen_movi_i32(QREG_PC, dest);
810
    s->is_jmp = DISAS_JUMP;
811
}
812

    
813
/* Generate a jump to the address in qreg DEST.  */
814
static void gen_jmp(DisasContext *s, TCGv dest)
815
{
816
    gen_flush_cc_op(s);
817
    tcg_gen_mov_i32(QREG_PC, dest);
818
    s->is_jmp = DISAS_JUMP;
819
}
820

    
821
static void gen_exception(DisasContext *s, uint32_t where, int nr)
822
{
823
    gen_flush_cc_op(s);
824
    gen_jmp_im(s, where);
825
    gen_helper_raise_exception(tcg_const_i32(nr));
826
}
827

    
828
static inline void gen_addr_fault(DisasContext *s)
829
{
830
    gen_exception(s, s->insn_pc, EXCP_ADDRESS);
831
}
832

    
833
#define SRC_EA(result, opsize, op_sign, addrp) do { \
834
    result = gen_ea(s, insn, opsize, NULL_QREG, addrp, op_sign ? EA_LOADS : EA_LOADU); \
835
    if (IS_NULL_QREG(result)) { \
836
        gen_addr_fault(s); \
837
        return; \
838
    } \
839
    } while (0)
840

    
841
#define DEST_EA(insn, opsize, val, addrp) do { \
842
    TCGv ea_result = gen_ea(s, insn, opsize, val, addrp, EA_STORE); \
843
    if (IS_NULL_QREG(ea_result)) { \
844
        gen_addr_fault(s); \
845
        return; \
846
    } \
847
    } while (0)
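
/* SRC_EA and DEST_EA wrap gen_ea for the common case: they read or write
   the operand addressed by the low bits of INSN and return from the
   current disas_* handler with an address-error exception if the
   addressing mode is not valid for the access.  */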
848

    
849
/* Generate a direct jump to an immediate address, chaining translation
   blocks when possible.  */
850
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
851
{
852
    TranslationBlock *tb;
853

    
854
    tb = s->tb;
855
    if (unlikely(s->singlestep_enabled)) {
856
        gen_exception(s, dest, EXCP_DEBUG);
857
    } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
858
               (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
859
        tcg_gen_goto_tb(n);
860
        tcg_gen_movi_i32(QREG_PC, dest);
861
        tcg_gen_exit_tb((tcg_target_long)tb + n);
862
    } else {
863
        gen_jmp_im(s, dest);
864
        tcg_gen_exit_tb(0);
865
    }
866
    s->is_jmp = DISAS_TB_JUMP;
867
}
868

    
869
DISAS_INSN(undef_mac)
870
{
871
    gen_exception(s, s->pc - 2, EXCP_LINEA);
872
}
873

    
874
DISAS_INSN(undef_fpu)
875
{
876
    gen_exception(s, s->pc - 2, EXCP_LINEF);
877
}
878

    
879
DISAS_INSN(undef)
880
{
881
    gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
882
    cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
883
              insn, s->pc - 2);
884
}
885

    
886
DISAS_INSN(mulw)
887
{
888
    TCGv reg;
889
    TCGv tmp;
890
    TCGv src;
891
    int sign;
892

    
893
    sign = (insn & 0x100) != 0;
894
    reg = DREG(insn, 9);
895
    tmp = tcg_temp_new();
896
    if (sign)
897
        tcg_gen_ext16s_i32(tmp, reg);
898
    else
899
        tcg_gen_ext16u_i32(tmp, reg);
900
    SRC_EA(src, OS_WORD, sign, NULL);
901
    tcg_gen_mul_i32(tmp, tmp, src);
902
    tcg_gen_mov_i32(reg, tmp);
903
    /* Unlike m68k, coldfire always clears the overflow bit.  */
904
    gen_logic_cc(s, tmp);
905
}
906

    
907
DISAS_INSN(divw)
908
{
909
    TCGv reg;
910
    TCGv tmp;
911
    TCGv src;
912
    int sign;
913

    
914
    sign = (insn & 0x100) != 0;
915
    reg = DREG(insn, 9);
916
    if (sign) {
917
        tcg_gen_ext16s_i32(QREG_DIV1, reg);
918
    } else {
919
        tcg_gen_ext16u_i32(QREG_DIV1, reg);
920
    }
921
    SRC_EA(src, OS_WORD, sign, NULL);
922
    tcg_gen_mov_i32(QREG_DIV2, src);
923
    if (sign) {
924
        gen_helper_divs(cpu_env, tcg_const_i32(1));
925
    } else {
926
        gen_helper_divu(cpu_env, tcg_const_i32(1));
927
    }
928

    
929
    tmp = tcg_temp_new();
930
    src = tcg_temp_new();
931
    tcg_gen_ext16u_i32(tmp, QREG_DIV1);
932
    tcg_gen_shli_i32(src, QREG_DIV2, 16);
933
    tcg_gen_or_i32(reg, tmp, src);
934
    s->cc_op = CC_OP_FLAGS;
935
}
936

    
937
DISAS_INSN(divl)
938
{
939
    TCGv num;
940
    TCGv den;
941
    TCGv reg;
942
    uint16_t ext;
943

    
944
    ext = lduw_code(s->pc);
945
    s->pc += 2;
946
    if (ext & 0x87f8) {
947
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
948
        return;
949
    }
950
    num = DREG(ext, 12);
951
    reg = DREG(ext, 0);
952
    tcg_gen_mov_i32(QREG_DIV1, num);
953
    SRC_EA(den, OS_LONG, 0, NULL);
954
    tcg_gen_mov_i32(QREG_DIV2, den);
955
    if (ext & 0x0800) {
956
        gen_helper_divs(cpu_env, tcg_const_i32(0));
957
    } else {
958
        gen_helper_divu(cpu_env, tcg_const_i32(0));
959
    }
960
    if ((ext & 7) == ((ext >> 12) & 7)) {
961
        /* div */
962
        tcg_gen_mov_i32 (reg, QREG_DIV1);
963
    } else {
964
        /* rem */
965
        tcg_gen_mov_i32 (reg, QREG_DIV2);
966
    }
967
    s->cc_op = CC_OP_FLAGS;
968
}
969

    
970
DISAS_INSN(addsub)
971
{
972
    TCGv reg;
973
    TCGv dest;
974
    TCGv src;
975
    TCGv tmp;
976
    TCGv addr;
977
    int add;
978

    
979
    add = (insn & 0x4000) != 0;
980
    reg = DREG(insn, 9);
981
    dest = tcg_temp_new();
982
    if (insn & 0x100) {
983
        SRC_EA(tmp, OS_LONG, 0, &addr);
984
        src = reg;
985
    } else {
986
        tmp = reg;
987
        SRC_EA(src, OS_LONG, 0, NULL);
988
    }
989
    if (add) {
990
        tcg_gen_add_i32(dest, tmp, src);
991
        gen_helper_xflag_lt(QREG_CC_X, dest, src);
992
        s->cc_op = CC_OP_ADD;
993
    } else {
994
        gen_helper_xflag_lt(QREG_CC_X, tmp, src);
995
        tcg_gen_sub_i32(dest, tmp, src);
996
        s->cc_op = CC_OP_SUB;
997
    }
998
    gen_update_cc_add(dest, src);
999
    if (insn & 0x100) {
1000
        DEST_EA(insn, OS_LONG, dest, &addr);
1001
    } else {
1002
        tcg_gen_mov_i32(reg, dest);
1003
    }
1004
}
1005

    
1006

    
1007
/* Reverse the order of the bits in REG.  */
1008
DISAS_INSN(bitrev)
1009
{
1010
    TCGv reg;
1011
    reg = DREG(insn, 0);
1012
    gen_helper_bitrev(reg, reg);
1013
}
1014

    
1015
DISAS_INSN(bitop_reg)
1016
{
1017
    int opsize;
1018
    int op;
1019
    TCGv src1;
1020
    TCGv src2;
1021
    TCGv tmp;
1022
    TCGv addr;
1023
    TCGv dest;
1024

    
1025
    if ((insn & 0x38) != 0)
1026
        opsize = OS_BYTE;
1027
    else
1028
        opsize = OS_LONG;
1029
    op = (insn >> 6) & 3;
1030
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1031
    src2 = DREG(insn, 9);
1032
    dest = tcg_temp_new();
1033

    
1034
    gen_flush_flags(s);
1035
    tmp = tcg_temp_new();
1036
    if (opsize == OS_BYTE)
1037
        tcg_gen_andi_i32(tmp, src2, 7);
1038
    else
1039
        tcg_gen_andi_i32(tmp, src2, 31);
1040
    src2 = tmp;
1041
    tmp = tcg_temp_new();
1042
    tcg_gen_shr_i32(tmp, src1, src2);
1043
    tcg_gen_andi_i32(tmp, tmp, 1);
1044
    tcg_gen_shli_i32(tmp, tmp, 2);
1045
    /* Clear CCF_Z if bit set.  */
1046
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1047
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1048

    
1049
    tcg_gen_shl_i32(tmp, tcg_const_i32(1), src2);
1050
    switch (op) {
1051
    case 1: /* bchg */
1052
        tcg_gen_xor_i32(dest, src1, tmp);
1053
        break;
1054
    case 2: /* bclr */
1055
        tcg_gen_not_i32(tmp, tmp);
1056
        tcg_gen_and_i32(dest, src1, tmp);
1057
        break;
1058
    case 3: /* bset */
1059
        tcg_gen_or_i32(dest, src1, tmp);
1060
        break;
1061
    default: /* btst */
1062
        break;
1063
    }
1064
    if (op)
1065
        DEST_EA(insn, opsize, dest, &addr);
1066
}
1067

    
1068
DISAS_INSN(sats)
1069
{
1070
    TCGv reg;
1071
    reg = DREG(insn, 0);
1072
    gen_flush_flags(s);
1073
    gen_helper_sats(reg, reg, QREG_CC_DEST);
1074
    gen_logic_cc(s, reg);
1075
}
1076

    
1077
static void gen_push(DisasContext *s, TCGv val)
1078
{
1079
    TCGv tmp;
1080

    
1081
    tmp = tcg_temp_new();
1082
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1083
    gen_store(s, OS_LONG, tmp, val);
1084
    tcg_gen_mov_i32(QREG_SP, tmp);
1085
}
1086

    
1087
DISAS_INSN(movem)
1088
{
1089
    TCGv addr;
1090
    int i;
1091
    uint16_t mask;
1092
    TCGv reg;
1093
    TCGv tmp;
1094
    int is_load;
1095

    
1096
    mask = lduw_code(s->pc);
1097
    s->pc += 2;
1098
    tmp = gen_lea(s, insn, OS_LONG);
1099
    if (IS_NULL_QREG(tmp)) {
1100
        gen_addr_fault(s);
1101
        return;
1102
    }
1103
    addr = tcg_temp_new();
1104
    tcg_gen_mov_i32(addr, tmp);
1105
    is_load = ((insn & 0x0400) != 0);
1106
    for (i = 0; i < 16; i++, mask >>= 1) {
1107
        if (mask & 1) {
1108
            if (i < 8)
1109
                reg = DREG(i, 0);
1110
            else
1111
                reg = AREG(i, 0);
1112
            if (is_load) {
1113
                tmp = gen_load(s, OS_LONG, addr, 0);
1114
                tcg_gen_mov_i32(reg, tmp);
1115
            } else {
1116
                gen_store(s, OS_LONG, addr, reg);
1117
            }
1118
            if (mask != 1)
1119
                tcg_gen_addi_i32(addr, addr, 4);
1120
        }
1121
    }
1122
}
1123

    
1124
DISAS_INSN(bitop_im)
1125
{
1126
    int opsize;
1127
    int op;
1128
    TCGv src1;
1129
    uint32_t mask;
1130
    int bitnum;
1131
    TCGv tmp;
1132
    TCGv addr;
1133

    
1134
    if ((insn & 0x38) != 0)
1135
        opsize = OS_BYTE;
1136
    else
1137
        opsize = OS_LONG;
1138
    op = (insn >> 6) & 3;
1139

    
1140
    bitnum = lduw_code(s->pc);
1141
    s->pc += 2;
1142
    if (bitnum & 0xff00) {
1143
        disas_undef(s, insn);
1144
        return;
1145
    }
1146

    
1147
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1148

    
1149
    gen_flush_flags(s);
1150
    if (opsize == OS_BYTE)
1151
        bitnum &= 7;
1152
    else
1153
        bitnum &= 31;
1154
    mask = 1 << bitnum;
1155

    
1156
    tmp = tcg_temp_new();
1157
    assert (CCF_Z == (1 << 2));
1158
    if (bitnum > 2)
1159
        tcg_gen_shri_i32(tmp, src1, bitnum - 2);
1160
    else if (bitnum < 2)
1161
        tcg_gen_shli_i32(tmp, src1, 2 - bitnum);
1162
    else
1163
        tcg_gen_mov_i32(tmp, src1);
1164
    tcg_gen_andi_i32(tmp, tmp, CCF_Z);
1165
    /* Clear CCF_Z if bit set.  */
1166
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1167
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1168
    if (op) {
1169
        switch (op) {
1170
        case 1: /* bchg */
1171
            tcg_gen_xori_i32(tmp, src1, mask);
1172
            break;
1173
        case 2: /* bclr */
1174
            tcg_gen_andi_i32(tmp, src1, ~mask);
1175
            break;
1176
        case 3: /* bset */
1177
            tcg_gen_ori_i32(tmp, src1, mask);
1178
            break;
1179
        default: /* btst */
1180
            break;
1181
        }
1182
        DEST_EA(insn, opsize, tmp, &addr);
1183
    }
1184
}
1185

    
1186
DISAS_INSN(arith_im)
1187
{
1188
    int op;
1189
    uint32_t im;
1190
    TCGv src1;
1191
    TCGv dest;
1192
    TCGv addr;
1193

    
1194
    op = (insn >> 9) & 7;
1195
    SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
1196
    im = read_im32(s);
1197
    dest = tcg_temp_new();
1198
    switch (op) {
1199
    case 0: /* ori */
1200
        tcg_gen_ori_i32(dest, src1, im);
1201
        gen_logic_cc(s, dest);
1202
        break;
1203
    case 1: /* andi */
1204
        tcg_gen_andi_i32(dest, src1, im);
1205
        gen_logic_cc(s, dest);
1206
        break;
1207
    case 2: /* subi */
1208
        tcg_gen_mov_i32(dest, src1);
1209
        gen_helper_xflag_lt(QREG_CC_X, dest, tcg_const_i32(im));
1210
        tcg_gen_subi_i32(dest, dest, im);
1211
        gen_update_cc_add(dest, tcg_const_i32(im));
1212
        s->cc_op = CC_OP_SUB;
1213
        break;
1214
    case 3: /* addi */
1215
        tcg_gen_mov_i32(dest, src1);
1216
        tcg_gen_addi_i32(dest, dest, im);
1217
        gen_update_cc_add(dest, tcg_const_i32(im));
1218
        gen_helper_xflag_lt(QREG_CC_X, dest, tcg_const_i32(im));
1219
        s->cc_op = CC_OP_ADD;
1220
        break;
1221
    case 5: /* eori */
1222
        tcg_gen_xori_i32(dest, src1, im);
1223
        gen_logic_cc(s, dest);
1224
        break;
1225
    case 6: /* cmpi */
1226
        tcg_gen_mov_i32(dest, src1);
1227
        tcg_gen_subi_i32(dest, dest, im);
1228
        gen_update_cc_add(dest, tcg_const_i32(im));
1229
        s->cc_op = CC_OP_SUB;
1230
        break;
1231
    default:
1232
        abort();
1233
    }
1234
    if (op != 6) {
1235
        DEST_EA(insn, OS_LONG, dest, &addr);
1236
    }
1237
}
1238

    
1239
DISAS_INSN(byterev)
1240
{
1241
    TCGv reg;
1242

    
1243
    reg = DREG(insn, 0);
1244
    tcg_gen_bswap32_i32(reg, reg);
1245
}
1246

    
1247
DISAS_INSN(move)
1248
{
1249
    TCGv src;
1250
    TCGv dest;
1251
    int op;
1252
    int opsize;
1253

    
1254
    switch (insn >> 12) {
1255
    case 1: /* move.b */
1256
        opsize = OS_BYTE;
1257
        break;
1258
    case 2: /* move.l */
1259
        opsize = OS_LONG;
1260
        break;
1261
    case 3: /* move.w */
1262
        opsize = OS_WORD;
1263
        break;
1264
    default:
1265
        abort();
1266
    }
1267
    SRC_EA(src, opsize, 1, NULL);
1268
    op = (insn >> 6) & 7;
1269
    if (op == 1) {
1270
        /* movea */
1271
        /* The value will already have been sign extended.  */
1272
        dest = AREG(insn, 9);
1273
        tcg_gen_mov_i32(dest, src);
1274
    } else {
1275
        /* normal move */
1276
        uint16_t dest_ea;
1277
        dest_ea = ((insn >> 9) & 7) | (op << 3);
1278
        DEST_EA(dest_ea, opsize, src, NULL);
1279
        /* This will be correct because loads sign extend.  */
1280
        gen_logic_cc(s, src);
1281
    }
1282
}
1283

    
1284
DISAS_INSN(negx)
1285
{
1286
    TCGv reg;
1287

    
1288
    gen_flush_flags(s);
1289
    reg = DREG(insn, 0);
1290
    gen_helper_subx_cc(reg, cpu_env, tcg_const_i32(0), reg);
1291
}
1292

    
1293
DISAS_INSN(lea)
1294
{
1295
    TCGv reg;
1296
    TCGv tmp;
1297

    
1298
    reg = AREG(insn, 9);
1299
    tmp = gen_lea(s, insn, OS_LONG);
1300
    if (IS_NULL_QREG(tmp)) {
1301
        gen_addr_fault(s);
1302
        return;
1303
    }
1304
    tcg_gen_mov_i32(reg, tmp);
1305
}
1306

    
1307
DISAS_INSN(clr)
1308
{
1309
    int opsize;
1310

    
1311
    switch ((insn >> 6) & 3) {
1312
    case 0: /* clr.b */
1313
        opsize = OS_BYTE;
1314
        break;
1315
    case 1: /* clr.w */
1316
        opsize = OS_WORD;
1317
        break;
1318
    case 2: /* clr.l */
1319
        opsize = OS_LONG;
1320
        break;
1321
    default:
1322
        abort();
1323
    }
1324
    DEST_EA(insn, opsize, tcg_const_i32(0), NULL);
1325
    gen_logic_cc(s, tcg_const_i32(0));
1326
}
1327

    
1328
static TCGv gen_get_ccr(DisasContext *s)
1329
{
1330
    TCGv dest;
1331

    
1332
    gen_flush_flags(s);
1333
    dest = tcg_temp_new();
1334
    tcg_gen_shli_i32(dest, QREG_CC_X, 4);
1335
    tcg_gen_or_i32(dest, dest, QREG_CC_DEST);
1336
    return dest;
1337
}
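
/* The CCR is assembled from the lazily tracked flags: X is kept in
   QREG_CC_X and placed in bit 4, while the N/Z/V/C bits live in
   QREG_CC_DEST once gen_flush_flags() has materialized them.  */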
1338

    
1339
DISAS_INSN(move_from_ccr)
1340
{
1341
    TCGv reg;
1342
    TCGv ccr;
1343

    
1344
    ccr = gen_get_ccr(s);
1345
    reg = DREG(insn, 0);
1346
    gen_partset_reg(OS_WORD, reg, ccr);
1347
}
1348

    
1349
DISAS_INSN(neg)
1350
{
1351
    TCGv reg;
1352
    TCGv src1;
1353

    
1354
    reg = DREG(insn, 0);
1355
    src1 = tcg_temp_new();
1356
    tcg_gen_mov_i32(src1, reg);
1357
    tcg_gen_neg_i32(reg, src1);
1358
    s->cc_op = CC_OP_SUB;
1359
    gen_update_cc_add(reg, src1);
1360
    gen_helper_xflag_lt(QREG_CC_X, tcg_const_i32(0), src1);
1361
    s->cc_op = CC_OP_SUB;
1362
}
1363

    
1364
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
1365
{
1366
    tcg_gen_movi_i32(QREG_CC_DEST, val & 0xf);
1367
    tcg_gen_movi_i32(QREG_CC_X, (val & 0x10) >> 4);
1368
    if (!ccr_only) {
1369
        gen_helper_set_sr(cpu_env, tcg_const_i32(val & 0xff00));
1370
    }
1371
}
1372

    
1373
static void gen_set_sr(DisasContext *s, uint16_t insn, int ccr_only)
1374
{
1375
    TCGv tmp;
1376
    TCGv reg;
1377

    
1378
    s->cc_op = CC_OP_FLAGS;
1379
    if ((insn & 0x38) == 0)
1380
      {
1381
        tmp = tcg_temp_new();
1382
        reg = DREG(insn, 0);
1383
        tcg_gen_andi_i32(QREG_CC_DEST, reg, 0xf);
1384
        tcg_gen_shri_i32(tmp, reg, 4);
1385
        tcg_gen_andi_i32(QREG_CC_X, tmp, 1);
1386
        if (!ccr_only) {
1387
            gen_helper_set_sr(cpu_env, reg);
1388
        }
1389
      }
1390
    else if ((insn & 0x3f) == 0x3c)
1391
      {
1392
        uint16_t val;
1393
        val = lduw_code(s->pc);
1394
        s->pc += 2;
1395
        gen_set_sr_im(s, val, ccr_only);
1396
      }
1397
    else
1398
        disas_undef(s, insn);
1399
}
1400

    
1401
DISAS_INSN(move_to_ccr)
1402
{
1403
    gen_set_sr(s, insn, 1);
1404
}
1405

    
1406
DISAS_INSN(not)
1407
{
1408
    TCGv reg;
1409

    
1410
    reg = DREG(insn, 0);
1411
    tcg_gen_not_i32(reg, reg);
1412
    gen_logic_cc(s, reg);
1413
}
1414

    
1415
DISAS_INSN(swap)
1416
{
1417
    TCGv src1;
1418
    TCGv src2;
1419
    TCGv reg;
1420

    
1421
    src1 = tcg_temp_new();
1422
    src2 = tcg_temp_new();
1423
    reg = DREG(insn, 0);
1424
    tcg_gen_shli_i32(src1, reg, 16);
1425
    tcg_gen_shri_i32(src2, reg, 16);
1426
    tcg_gen_or_i32(reg, src1, src2);
1427
    gen_logic_cc(s, reg);
1428
}
1429

    
1430
DISAS_INSN(pea)
1431
{
1432
    TCGv tmp;
1433

    
1434
    tmp = gen_lea(s, insn, OS_LONG);
1435
    if (IS_NULL_QREG(tmp)) {
1436
        gen_addr_fault(s);
1437
        return;
1438
    }
1439
    gen_push(s, tmp);
1440
}
1441

    
1442
DISAS_INSN(ext)
1443
{
1444
    int op;
1445
    TCGv reg;
1446
    TCGv tmp;
1447

    
1448
    reg = DREG(insn, 0);
1449
    op = (insn >> 6) & 7;
1450
    tmp = tcg_temp_new();
1451
    if (op == 3)
1452
        tcg_gen_ext16s_i32(tmp, reg);
1453
    else
1454
        tcg_gen_ext8s_i32(tmp, reg);
1455
    if (op == 2)
1456
        gen_partset_reg(OS_WORD, reg, tmp);
1457
    else
1458
        tcg_gen_mov_i32(reg, tmp);
1459
    gen_logic_cc(s, tmp);
1460
}
1461

    
1462
DISAS_INSN(tst)
1463
{
1464
    int opsize;
1465
    TCGv tmp;
1466

    
1467
    switch ((insn >> 6) & 3) {
1468
    case 0: /* tst.b */
1469
        opsize = OS_BYTE;
1470
        break;
1471
    case 1: /* tst.w */
1472
        opsize = OS_WORD;
1473
        break;
1474
    case 2: /* tst.l */
1475
        opsize = OS_LONG;
1476
        break;
1477
    default:
1478
        abort();
1479
    }
1480
    SRC_EA(tmp, opsize, 1, NULL);
1481
    gen_logic_cc(s, tmp);
1482
}
1483

    
1484
DISAS_INSN(pulse)
1485
{
1486
  /* Implemented as a NOP.  */
1487
}
1488

    
1489
DISAS_INSN(illegal)
1490
{
1491
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1492
}
1493

    
1494
/* ??? This should be atomic.  */
1495
DISAS_INSN(tas)
1496
{
1497
    TCGv dest;
1498
    TCGv src1;
1499
    TCGv addr;
1500

    
1501
    dest = tcg_temp_new();
1502
    SRC_EA(src1, OS_BYTE, 1, &addr);
1503
    gen_logic_cc(s, src1);
1504
    tcg_gen_ori_i32(dest, src1, 0x80);
1505
    DEST_EA(insn, OS_BYTE, dest, &addr);
1506
}
1507

    
1508
DISAS_INSN(mull)
1509
{
1510
    uint16_t ext;
1511
    TCGv reg;
1512
    TCGv src1;
1513
    TCGv dest;
1514

    
1515
    /* The upper 32 bits of the product are discarded, so
1516
       muls.l and mulu.l are functionally equivalent.  */
1517
    ext = lduw_code(s->pc);
1518
    s->pc += 2;
1519
    if (ext & 0x87ff) {
1520
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1521
        return;
1522
    }
1523
    reg = DREG(ext, 12);
1524
    SRC_EA(src1, OS_LONG, 0, NULL);
1525
    dest = tcg_temp_new();
1526
    tcg_gen_mul_i32(dest, src1, reg);
1527
    tcg_gen_mov_i32(reg, dest);
1528
    /* Unlike m68k, coldfire always clears the overflow bit.  */
1529
    gen_logic_cc(s, dest);
1530
}
1531

    
1532
DISAS_INSN(link)
1533
{
1534
    int16_t offset;
1535
    TCGv reg;
1536
    TCGv tmp;
1537

    
1538
    offset = ldsw_code(s->pc);
1539
    s->pc += 2;
1540
    reg = AREG(insn, 0);
1541
    tmp = tcg_temp_new();
1542
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1543
    gen_store(s, OS_LONG, tmp, reg);
1544
    if ((insn & 7) != 7)
1545
        tcg_gen_mov_i32(reg, tmp);
1546
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
1547
}
1548

    
1549
DISAS_INSN(unlk)
1550
{
1551
    TCGv src;
1552
    TCGv reg;
1553
    TCGv tmp;
1554

    
1555
    src = tcg_temp_new();
1556
    reg = AREG(insn, 0);
1557
    tcg_gen_mov_i32(src, reg);
1558
    tmp = gen_load(s, OS_LONG, src, 0);
1559
    tcg_gen_mov_i32(reg, tmp);
1560
    tcg_gen_addi_i32(QREG_SP, src, 4);
1561
}
1562

    
1563
DISAS_INSN(nop)
1564
{
1565
}
1566

    
1567
DISAS_INSN(rts)
1568
{
1569
    TCGv tmp;
1570

    
1571
    tmp = gen_load(s, OS_LONG, QREG_SP, 0);
1572
    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
1573
    gen_jmp(s, tmp);
1574
}
1575

    
1576
DISAS_INSN(jump)
1577
{
1578
    TCGv tmp;
1579

    
1580
    /* Load the target address first to ensure correct exception
1581
       behavior.  */
1582
    tmp = gen_lea(s, insn, OS_LONG);
1583
    if (IS_NULL_QREG(tmp)) {
1584
        gen_addr_fault(s);
1585
        return;
1586
    }
1587
    if ((insn & 0x40) == 0) {
1588
        /* jsr */
1589
        gen_push(s, tcg_const_i32(s->pc));
1590
    }
1591
    gen_jmp(s, tmp);
1592
}
1593

    
1594
DISAS_INSN(addsubq)
1595
{
1596
    TCGv src1;
1597
    TCGv src2;
1598
    TCGv dest;
1599
    int val;
1600
    TCGv addr;
1601

    
1602
    SRC_EA(src1, OS_LONG, 0, &addr);
1603
    val = (insn >> 9) & 7;
1604
    if (val == 0)
1605
        val = 8;
1606
    dest = tcg_temp_new();
1607
    tcg_gen_mov_i32(dest, src1);
1608
    if ((insn & 0x38) == 0x08) {
1609
        /* Don't update condition codes if the destination is an
1610
           address register.  */
1611
        if (insn & 0x0100) {
1612
            tcg_gen_subi_i32(dest, dest, val);
1613
        } else {
1614
            tcg_gen_addi_i32(dest, dest, val);
1615
        }
1616
    } else {
1617
        src2 = tcg_const_i32(val);
1618
        if (insn & 0x0100) {
1619
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1620
            tcg_gen_subi_i32(dest, dest, val);
1621
            s->cc_op = CC_OP_SUB;
1622
        } else {
1623
            tcg_gen_addi_i32(dest, dest, val);
1624
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1625
            s->cc_op = CC_OP_ADD;
1626
        }
1627
        gen_update_cc_add(dest, src2);
1628
    }
1629
    DEST_EA(insn, OS_LONG, dest, &addr);
1630
}
1631

    
1632
DISAS_INSN(tpf)
1633
{
1634
    switch (insn & 7) {
1635
    case 2: /* One extension word.  */
1636
        s->pc += 2;
1637
        break;
1638
    case 3: /* Two extension words.  */
1639
        s->pc += 4;
1640
        break;
1641
    case 4: /* No extension words.  */
1642
        break;
1643
    default:
1644
        disas_undef(s, insn);
1645
    }
1646
}
1647

    
1648
DISAS_INSN(branch)
1649
{
1650
    int32_t offset;
1651
    uint32_t base;
1652
    int op;
1653
    int l1;
1654

    
1655
    base = s->pc;
1656
    op = (insn >> 8) & 0xf;
1657
    offset = (int8_t)insn;
1658
    if (offset == 0) {
1659
        offset = ldsw_code(s->pc);
1660
        s->pc += 2;
1661
    } else if (offset == -1) {
1662
        offset = read_im32(s);
1663
    }
1664
    if (op == 1) {
1665
        /* bsr */
1666
        gen_push(s, tcg_const_i32(s->pc));
1667
    }
1668
    gen_flush_cc_op(s);
1669
    if (op > 1) {
1670
        /* Bcc */
1671
        l1 = gen_new_label();
1672
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1673
        gen_jmp_tb(s, 1, base + offset);
1674
        gen_set_label(l1);
1675
        gen_jmp_tb(s, 0, s->pc);
1676
    } else {
1677
        /* Unconditional branch.  */
1678
        gen_jmp_tb(s, 0, base + offset);
1679
    }
1680
}
1681

    
1682
DISAS_INSN(moveq)
1683
{
1684
    uint32_t val;
1685

    
1686
    val = (int8_t)insn;
1687
    tcg_gen_movi_i32(DREG(insn, 9), val);
1688
    gen_logic_cc(s, tcg_const_i32(val));
1689
}
1690

    
1691
DISAS_INSN(mvzs)
1692
{
1693
    int opsize;
1694
    TCGv src;
1695
    TCGv reg;
1696

    
1697
    if (insn & 0x40)
1698
        opsize = OS_WORD;
1699
    else
1700
        opsize = OS_BYTE;
1701
    SRC_EA(src, opsize, (insn & 0x80) == 0, NULL);
1702
    reg = DREG(insn, 9);
1703
    tcg_gen_mov_i32(reg, src);
1704
    gen_logic_cc(s, src);
1705
}
1706

    
1707
DISAS_INSN(or)
1708
{
1709
    TCGv reg;
1710
    TCGv dest;
1711
    TCGv src;
1712
    TCGv addr;
1713

    
1714
    reg = DREG(insn, 9);
1715
    dest = tcg_temp_new();
1716
    if (insn & 0x100) {
1717
        SRC_EA(src, OS_LONG, 0, &addr);
1718
        tcg_gen_or_i32(dest, src, reg);
1719
        DEST_EA(insn, OS_LONG, dest, &addr);
1720
    } else {
1721
        SRC_EA(src, OS_LONG, 0, NULL);
1722
        tcg_gen_or_i32(dest, src, reg);
1723
        tcg_gen_mov_i32(reg, dest);
1724
    }
1725
    gen_logic_cc(s, dest);
1726
}
1727

    
1728
DISAS_INSN(suba)
1729
{
1730
    TCGv src;
1731
    TCGv reg;
1732

    
1733
    SRC_EA(src, OS_LONG, 0, NULL);
1734
    reg = AREG(insn, 9);
1735
    tcg_gen_sub_i32(reg, reg, src);
1736
}
1737

    
1738
DISAS_INSN(subx)
1739
{
1740
    TCGv reg;
1741
    TCGv src;
1742

    
1743
    gen_flush_flags(s);
1744
    reg = DREG(insn, 9);
1745
    src = DREG(insn, 0);
1746
    gen_helper_subx_cc(reg, cpu_env, reg, src);
1747
}
1748

    
1749
DISAS_INSN(mov3q)
1750
{
1751
    TCGv src;
1752
    int val;
1753

    
1754
    val = (insn >> 9) & 7;
1755
    if (val == 0)
1756
        val = -1;
1757
    src = tcg_const_i32(val);
1758
    gen_logic_cc(s, src);
1759
    DEST_EA(insn, OS_LONG, src, NULL);
1760
}
1761

    
1762
DISAS_INSN(cmp)
1763
{
1764
    int op;
1765
    TCGv src;
1766
    TCGv reg;
1767
    TCGv dest;
1768
    int opsize;
1769

    
1770
    op = (insn >> 6) & 3;
1771
    switch (op) {
1772
    case 0: /* cmp.b */
1773
        opsize = OS_BYTE;
1774
        s->cc_op = CC_OP_CMPB;
1775
        break;
1776
    case 1: /* cmp.w */
1777
        opsize = OS_WORD;
1778
        s->cc_op = CC_OP_CMPW;
1779
        break;
1780
    case 2: /* cmp.l */
1781
        opsize = OS_LONG;
1782
        s->cc_op = CC_OP_SUB;
1783
        break;
1784
    default:
1785
        abort();
1786
    }
1787
    SRC_EA(src, opsize, 1, NULL);
1788
    reg = DREG(insn, 9);
1789
    dest = tcg_temp_new();
1790
    tcg_gen_sub_i32(dest, reg, src);
1791
    gen_update_cc_add(dest, src);
1792
}
1793

    
1794
DISAS_INSN(cmpa)
1795
{
1796
    int opsize;
1797
    TCGv src;
1798
    TCGv reg;
1799
    TCGv dest;
1800

    
1801
    if (insn & 0x100) {
1802
        opsize = OS_LONG;
1803
    } else {
1804
        opsize = OS_WORD;
1805
    }
1806
    SRC_EA(src, opsize, 1, NULL);
1807
    reg = AREG(insn, 9);
1808
    dest = tcg_temp_new();
1809
    tcg_gen_sub_i32(dest, reg, src);
1810
    gen_update_cc_add(dest, src);
1811
    s->cc_op = CC_OP_SUB;
1812
}
1813

    
1814
DISAS_INSN(eor)
1815
{
1816
    TCGv src;
1817
    TCGv reg;
1818
    TCGv dest;
1819
    TCGv addr;
1820

    
1821
    SRC_EA(src, OS_LONG, 0, &addr);
1822
    reg = DREG(insn, 9);
1823
    dest = tcg_temp_new();
1824
    tcg_gen_xor_i32(dest, src, reg);
1825
    gen_logic_cc(s, dest);
1826
    DEST_EA(insn, OS_LONG, dest, &addr);
1827
}
1828

    
1829
DISAS_INSN(and)
1830
{
1831
    TCGv src;
1832
    TCGv reg;
1833
    TCGv dest;
1834
    TCGv addr;
1835

    
1836
    reg = DREG(insn, 9);
1837
    dest = tcg_temp_new();
1838
    if (insn & 0x100) {
1839
        SRC_EA(src, OS_LONG, 0, &addr);
1840
        tcg_gen_and_i32(dest, src, reg);
1841
        DEST_EA(insn, OS_LONG, dest, &addr);
1842
    } else {
1843
        SRC_EA(src, OS_LONG, 0, NULL);
1844
        tcg_gen_and_i32(dest, src, reg);
1845
        tcg_gen_mov_i32(reg, dest);
1846
    }
1847
    gen_logic_cc(s, dest);
1848
}
1849

    
1850
DISAS_INSN(adda)
1851
{
1852
    TCGv src;
1853
    TCGv reg;
1854

    
1855
    SRC_EA(src, OS_LONG, 0, NULL);
1856
    reg = AREG(insn, 9);
1857
    tcg_gen_add_i32(reg, reg, src);
1858
}
1859

    
1860
DISAS_INSN(addx)
1861
{
1862
    TCGv reg;
1863
    TCGv src;
1864

    
1865
    gen_flush_flags(s);
1866
    reg = DREG(insn, 9);
1867
    src = DREG(insn, 0);
1868
    gen_helper_addx_cc(reg, cpu_env, reg, src);
1869
    s->cc_op = CC_OP_FLAGS;
1870
}
1871

    
1872
/* TODO: This could be implemented without helper functions.  */
1873
DISAS_INSN(shift_im)
1874
{
1875
    TCGv reg;
1876
    int tmp;
1877
    TCGv shift;
1878

    
1879
    reg = DREG(insn, 0);
1880
    tmp = (insn >> 9) & 7;
1881
    if (tmp == 0)
1882
        tmp = 8;
1883
    shift = tcg_const_i32(tmp);
1884
    /* No need to flush flags because we know we will set the C flag.  */
1885
    if (insn & 0x100) {
1886
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
1887
    } else {
1888
        if (insn & 8) {
1889
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
1890
        } else {
1891
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
1892
        }
1893
    }
1894
    s->cc_op = CC_OP_SHIFT;
1895
}
1896

    
1897
DISAS_INSN(shift_reg)
1898
{
1899
    TCGv reg;
1900
    TCGv shift;
1901

    
1902
    reg = DREG(insn, 0);
1903
    shift = DREG(insn, 9);
1904
    /* Shift by zero leaves C flag unmodified.   */
1905
    gen_flush_flags(s);
1906
    if (insn & 0x100) {
1907
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
1908
    } else {
1909
        if (insn & 8) {
1910
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
1911
        } else {
1912
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
1913
        }
1914
    }
1915
    s->cc_op = CC_OP_SHIFT;
1916
}
1917

    
1918
DISAS_INSN(ff1)
1919
{
1920
    TCGv reg;
1921
    reg = DREG(insn, 0);
1922
    gen_logic_cc(s, reg);
1923
    gen_helper_ff1(reg, reg);
1924
}
1925

    
1926
static TCGv gen_get_sr(DisasContext *s)
1927
{
1928
    TCGv ccr;
1929
    TCGv sr;
1930

    
1931
    ccr = gen_get_ccr(s);
1932
    sr = tcg_temp_new();
1933
    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
1934
    tcg_gen_or_i32(sr, sr, ccr);
1935
    return sr;
1936
}

DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    addr = s->pc - 2;
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (ext != 0x46FC) {
        gen_exception(s, addr, EXCP_UNSUPPORTED);
        return;
    }
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
}
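
/* Note (added): as implemented above, strldsr is only accepted as the exact
   two-word sequence 0x40e7 0x46fc <imm>, i.e. "push SR, then load SR from
   an immediate"; the 0x40e7 prefix itself is matched via the opcode table
   (see register_m68k_insns below), and any other second word raises
   EXCP_UNSUPPORTED.  */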

DISAS_INSN(move_from_sr)
{
    TCGv reg;
    TCGv sr;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    sr = gen_get_sr(s);
    reg = DREG(insn, 0);
    gen_partset_reg(OS_WORD, reg, sr);
}

DISAS_INSN(move_to_sr)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_set_sr(s, insn, 0);
    gen_lookup_tb(s);
}

DISAS_INSN(move_from_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(move_to_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(halt)
{
    gen_exception(s, s->pc, EXCP_HALT_INSN);
}

DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(QREG_HALTED, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}

DISAS_INSN(rte)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_exception(s, s->pc - 2, EXCP_RTE);
}

DISAS_INSN(movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    gen_lookup_tb(s);
}

DISAS_INSN(intouch)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* ICache fetch.  Implement as no-op.  */
}

DISAS_INSN(cpushl)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}

DISAS_INSN(wddata)
{
    gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
}

DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    qemu_assert(0, "WDEBUG not implemented");
}

DISAS_INSN(trap)
{
    gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
}

/* ??? FP exceptions are not implemented.  Most exceptions are deferred until
   immediately before the next FP instruction is executed.  */
DISAS_INSN(fpu)
{
    uint16_t ext;
    int32_t offset;
    int opmode;
    TCGv_i64 src;
    TCGv_i64 dest;
    TCGv_i64 res;
    TCGv tmp32;
    int round;
    int set_dest;
    int opsize;

    ext = lduw_code(s->pc);
    s->pc += 2;
    opmode = ext & 0x7f;
    switch ((ext >> 13) & 7) {
    case 0: case 2:
        break;
    case 1:
        goto undef;
    case 3: /* fmove out */
        src = FREG(ext, 7);
        tmp32 = tcg_temp_new_i32();
        /* fmove */
        /* ??? TODO: Proper behavior on overflow.  */
        switch ((ext >> 10) & 7) {
        case 0:
            opsize = OS_LONG;
            gen_helper_f64_to_i32(tmp32, cpu_env, src);
            break;
        case 1:
            opsize = OS_SINGLE;
            gen_helper_f64_to_f32(tmp32, cpu_env, src);
            break;
        case 4:
            opsize = OS_WORD;
            gen_helper_f64_to_i32(tmp32, cpu_env, src);
            break;
        case 5: /* OS_DOUBLE */
            tcg_gen_mov_i32(tmp32, AREG(insn, 0));
            switch ((insn >> 3) & 7) {
            case 2:
            case 3:
                break;
            case 4:
                tcg_gen_addi_i32(tmp32, tmp32, -8);
                break;
            case 5:
                offset = ldsw_code(s->pc);
                s->pc += 2;
                tcg_gen_addi_i32(tmp32, tmp32, offset);
                break;
            default:
                goto undef;
            }
            gen_store64(s, tmp32, src);
            switch ((insn >> 3) & 7) {
            case 3:
                tcg_gen_addi_i32(tmp32, tmp32, 8);
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            case 4:
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            }
            tcg_temp_free_i32(tmp32);
            return;
        case 6:
            opsize = OS_BYTE;
            gen_helper_f64_to_i32(tmp32, cpu_env, src);
            break;
        default:
            goto undef;
        }
        DEST_EA(insn, opsize, tmp32, NULL);
        tcg_temp_free_i32(tmp32);
        return;
    case 4: /* fmove to control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Ignore writes.  */
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove to control %d",
                      (ext >> 10) & 7);
        }
        break;
    case 5: /* fmove from control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Always return zero.  */
            tmp32 = tcg_const_i32(0);
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove from control %d",
                      (ext >> 10) & 7);
            goto undef;
        }
        DEST_EA(insn, OS_LONG, tmp32, NULL);
        break;
    case 6: /* fmovem */
    case 7:
        {
            TCGv addr;
            uint16_t mask;
            int i;
            if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
                goto undef;
            tmp32 = gen_lea(s, insn, OS_LONG);
            if (IS_NULL_QREG(tmp32)) {
                gen_addr_fault(s);
                return;
            }
            addr = tcg_temp_new_i32();
            tcg_gen_mov_i32(addr, tmp32);
            mask = 0x80;
            for (i = 0; i < 8; i++) {
                if (ext & mask) {
                    s->is_mem = 1;
                    dest = FREG(i, 0);
                    if (ext & (1 << 13)) {
                        /* store */
                        tcg_gen_qemu_stf64(dest, addr, IS_USER(s));
                    } else {
                        /* load */
                        tcg_gen_qemu_ldf64(dest, addr, IS_USER(s));
                    }
                    if (ext & (mask - 1))
                        tcg_gen_addi_i32(addr, addr, 8);
                }
                mask >>= 1;
            }
            tcg_temp_free_i32(addr);
        }
        return;
    }
    if (ext & (1 << 14)) {
        /* Source effective address.  */
        switch ((ext >> 10) & 7) {
        case 0: opsize = OS_LONG; break;
        case 1: opsize = OS_SINGLE; break;
        case 4: opsize = OS_WORD; break;
        case 5: opsize = OS_DOUBLE; break;
        case 6: opsize = OS_BYTE; break;
        default:
            goto undef;
        }
        if (opsize == OS_DOUBLE) {
            tmp32 = tcg_temp_new_i32();
            tcg_gen_mov_i32(tmp32, AREG(insn, 0));
            switch ((insn >> 3) & 7) {
            case 2:
            case 3:
                break;
            case 4:
                tcg_gen_addi_i32(tmp32, tmp32, -8);
                break;
            case 5:
                offset = ldsw_code(s->pc);
                s->pc += 2;
                tcg_gen_addi_i32(tmp32, tmp32, offset);
                break;
            case 7:
                offset = ldsw_code(s->pc);
                offset += s->pc - 2;
                s->pc += 2;
                tcg_gen_addi_i32(tmp32, tmp32, offset);
                break;
            default:
                goto undef;
            }
            src = gen_load64(s, tmp32);
            switch ((insn >> 3) & 7) {
            case 3:
                tcg_gen_addi_i32(tmp32, tmp32, 8);
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            case 4:
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            }
            tcg_temp_free_i32(tmp32);
        } else {
            SRC_EA(tmp32, opsize, 1, NULL);
            src = tcg_temp_new_i64();
            switch (opsize) {
            case OS_LONG:
            case OS_WORD:
            case OS_BYTE:
                gen_helper_i32_to_f64(src, cpu_env, tmp32);
                break;
            case OS_SINGLE:
                gen_helper_f32_to_f64(src, cpu_env, tmp32);
                break;
            }
        }
    } else {
        /* Source register.  */
        src = FREG(ext, 10);
    }
    dest = FREG(ext, 7);
    res = tcg_temp_new_i64();
    if (opmode != 0x3a)
        tcg_gen_mov_f64(res, dest);
    round = 1;
    set_dest = 1;
    switch (opmode) {
    case 0: case 0x40: case 0x44: /* fmove */
        tcg_gen_mov_f64(res, src);
        break;
    case 1: /* fint */
        gen_helper_iround_f64(res, cpu_env, src);
        round = 0;
        break;
    case 3: /* fintrz */
        gen_helper_itrunc_f64(res, cpu_env, src);
        round = 0;
        break;
    case 4: case 0x41: case 0x45: /* fsqrt */
        gen_helper_sqrt_f64(res, cpu_env, src);
        break;
    case 0x18: case 0x58: case 0x5c: /* fabs */
        gen_helper_abs_f64(res, src);
        break;
    case 0x1a: case 0x5a: case 0x5e: /* fneg */
        gen_helper_chs_f64(res, src);
        break;
    case 0x20: case 0x60: case 0x64: /* fdiv */
        gen_helper_div_f64(res, cpu_env, res, src);
        break;
    case 0x22: case 0x62: case 0x66: /* fadd */
        gen_helper_add_f64(res, cpu_env, res, src);
        break;
    case 0x23: case 0x63: case 0x67: /* fmul */
        gen_helper_mul_f64(res, cpu_env, res, src);
        break;
    case 0x28: case 0x68: case 0x6c: /* fsub */
        gen_helper_sub_f64(res, cpu_env, res, src);
        break;
    case 0x38: /* fcmp */
        gen_helper_sub_cmp_f64(res, cpu_env, res, src);
        set_dest = 0;
        round = 0;
        break;
    case 0x3a: /* ftst */
        tcg_gen_mov_f64(res, src);
        set_dest = 0;
        round = 0;
        break;
    default:
        goto undef;
    }
    if (ext & (1 << 14)) {
        tcg_temp_free_i64(src);
    }
    if (round) {
        if (opmode & 0x40) {
            if ((opmode & 0x4) != 0)
                round = 0;
        } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
            round = 0;
        }
    }
    if (round) {
        TCGv tmp = tcg_temp_new_i32();
        gen_helper_f64_to_f32(tmp, cpu_env, res);
        gen_helper_f32_to_f64(res, cpu_env, tmp);
        tcg_temp_free_i32(tmp);
    }
    tcg_gen_mov_f64(QREG_FP_RESULT, res);
    if (set_dest) {
        tcg_gen_mov_f64(dest, res);
    }
    tcg_temp_free_i64(res);
    return;
undef:
    /* FIXME: Is this right for offset addressing modes?  */
    s->pc -= 2;
    disas_undef_fpu(s, insn);
}

DISAS_INSN(fbcc)
{
    uint32_t offset;
    uint32_t addr;
    TCGv flag;
    int l1;

    addr = s->pc;
    offset = ldsw_code(s->pc);
    s->pc += 2;
    if (insn & (1 << 6)) {
        offset = (offset << 16) | lduw_code(s->pc);
        s->pc += 2;
    }

    l1 = gen_new_label();
    /* TODO: Raise BSUN exception.  */
    flag = tcg_temp_new();
    gen_helper_compare_f64(flag, cpu_env, QREG_FP_RESULT);
    /* Jump to l1 if condition is true.  */
    switch (insn & 0xf) {
    case 0: /* f */
        break;
    case 1: /* eq (=0) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 2: /* ogt (=1) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(1), l1);
        break;
    case 3: /* oge (=0 or =1) */
        tcg_gen_brcond_i32(TCG_COND_LEU, flag, tcg_const_i32(1), l1);
        break;
    case 4: /* olt (=-1) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(0), l1);
        break;
    case 5: /* ole (=-1 or =0) */
        tcg_gen_brcond_i32(TCG_COND_LE, flag, tcg_const_i32(0), l1);
        break;
    case 6: /* ogl (=-1 or =1) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 7: /* or (=2) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(2), l1);
        break;
    case 8: /* un (<2) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(2), l1);
        break;
    case 9: /* ueq (=0 or =2) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 10: /* ugt (>0) */
        tcg_gen_brcond_i32(TCG_COND_GT, flag, tcg_const_i32(0), l1);
        break;
    case 11: /* uge (>=0) */
        tcg_gen_brcond_i32(TCG_COND_GE, flag, tcg_const_i32(0), l1);
        break;
    case 12: /* ult (=-1 or =2) */
        tcg_gen_brcond_i32(TCG_COND_GEU, flag, tcg_const_i32(2), l1);
        break;
    case 13: /* ule (!=1) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(1), l1);
        break;
    case 14: /* ne (!=0) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 15: /* t */
        tcg_gen_br(l1);
        break;
    }
    gen_jmp_tb(s, 0, s->pc);
    gen_set_label(l1);
    gen_jmp_tb(s, 1, addr + offset);
}
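
/* Note (added): as used above, gen_helper_compare_f64() reduces
   QREG_FP_RESULT to a small integer class, and each FBcc predicate is
   implemented as an integer comparison against that class; the per-case
   comments record the class values each predicate expects.  */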

DISAS_INSN(frestore)
{
    /* TODO: Implement frestore.  */
    qemu_assert(0, "FRESTORE not implemented");
}

DISAS_INSN(fsave)
{
    /* TODO: Implement fsave.  */
    qemu_assert(0, "FSAVE not implemented");
}

static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
{
    TCGv tmp = tcg_temp_new();
    if (s->env->macsr & MACSR_FI) {
        if (upper)
            tcg_gen_andi_i32(tmp, val, 0xffff0000);
        else
            tcg_gen_shli_i32(tmp, val, 16);
    } else if (s->env->macsr & MACSR_SU) {
        if (upper)
            tcg_gen_sari_i32(tmp, val, 16);
        else
            tcg_gen_ext16s_i32(tmp, val);
    } else {
        if (upper)
            tcg_gen_shri_i32(tmp, val, 16);
        else
            tcg_gen_ext16u_i32(tmp, val);
    }
    return tmp;
}
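
/* Note on gen_mac_extract_word() above: the selected 16-bit half is placed
   according to the current MACSR mode - left-aligned in fractional mode
   (MACSR_FI), sign-extended in signed integer mode (MACSR_SU), and
   zero-extended otherwise.  */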

static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}

DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    if (!s->done_mac) {
        s->mactmp = tcg_temp_new_i64();
        s->done_mac = 1;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(s, insn, OS_LONG);
        addr = tcg_temp_new();
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /* Load the value now to ensure correct exception behavior.
           Perform writeback after reading the MAC inputs.  */
        loadval = gen_load(s, OS_LONG, addr, 0);

        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = tcg_temp_new();
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = tcg_temp_new();
            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /* FIXME: Should address writeback happen with the masked or
           unmasked value?  */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}
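
/* Note (added): in disas_mac() above the accumulator index is assembled from
   opcode bit 7 and extension-word bit 4, and is flipped (acc ^= 1) for the
   MAC-with-load forms; the dual-accumulate variant then takes its second
   accumulator from extension-word bits 2-3.  */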

DISAS_INSN(from_mac)
{
    TCGv rx;
    TCGv_i64 acc;
    int accnum;

    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    if (s->env->macsr & MACSR_FI) {
        gen_helper_get_macf(rx, cpu_env, acc);
    } else if ((s->env->macsr & MACSR_OMC) == 0) {
        tcg_gen_trunc_i64_i32(rx, acc);
    } else if (s->env->macsr & MACSR_SU) {
        gen_helper_get_macs(rx, acc);
    } else {
        gen_helper_get_macu(rx, acc);
    }
    if (insn & 0x40) {
        tcg_gen_movi_i64(acc, 0);
        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    }
}

DISAS_INSN(move_mac)
{
    /* FIXME: This can be done without a helper.  */
    int src;
    TCGv dest;
    src = insn & 3;
    dest = tcg_const_i32((insn >> 9) & 3);
    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, dest);
}

DISAS_INSN(from_macsr)
{
    TCGv reg;

    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MACSR);
}

DISAS_INSN(from_mask)
{
    TCGv reg;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
}

DISAS_INSN(from_mext)
{
    TCGv reg;
    TCGv acc;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_get_mac_extf(reg, cpu_env, acc);
    else
        gen_helper_get_mac_exti(reg, cpu_env, acc);
}

DISAS_INSN(macsr_to_ccr)
{
    tcg_gen_movi_i32(QREG_CC_X, 0);
    tcg_gen_andi_i32(QREG_CC_DEST, QREG_MACSR, 0xf);
    s->cc_op = CC_OP_FLAGS;
}

DISAS_INSN(to_mac)
{
    TCGv_i64 acc;
    TCGv val;
    int accnum;
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    SRC_EA(val, OS_LONG, 0, NULL);
    if (s->env->macsr & MACSR_FI) {
        tcg_gen_ext_i32_i64(acc, val);
        tcg_gen_shli_i64(acc, acc, 8);
    } else if (s->env->macsr & MACSR_SU) {
        tcg_gen_ext_i32_i64(acc, val);
    } else {
        tcg_gen_extu_i32_i64(acc, val);
    }
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
}

DISAS_INSN(to_macsr)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    gen_helper_set_macsr(cpu_env, val);
    gen_lookup_tb(s);
}

DISAS_INSN(to_mask)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
}

DISAS_INSN(to_mext)
{
    TCGv val;
    TCGv acc;
    SRC_EA(val, OS_LONG, 0, NULL);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_set_mac_extf(cpu_env, val, acc);
    else if (s->env->macsr & MACSR_SU)
        gen_helper_set_mac_exts(cpu_env, val, acc);
    else
        gen_helper_set_mac_extu(cpu_env, val, acc);
}

static disas_proc opcode_table[65536];

static void
register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
{
  int i;
  int from;
  int to;

  /* Sanity check.  All set bits must be included in the mask.  */
  if (opcode & ~mask) {
      fprintf(stderr,
              "qemu internal error: bogus opcode definition %04x/%04x\n",
              opcode, mask);
      abort();
  }
  /* This could probably be cleverer.  For now just optimize the case where
     the top bits are known.  */
  /* Find the first zero bit in the mask.  */
  i = 0x8000;
  while ((i & mask) != 0)
      i >>= 1;
  /* Iterate over all combinations of this and lower bits.  */
  if (i == 0)
      i = 1;
  else
      i <<= 1;
  from = opcode & ~(i - 1);
  to = from + i;
  for (i = from; i < to; i++) {
      if ((i & mask) == opcode)
          opcode_table[i] = proc;
  }
}
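
/* Worked example (illustrative): scc is registered below as opcode 0x50c0
   with mask 0xf0f8.  The first zero bit of the mask is bit 11, so after the
   final shift i becomes 0x1000, giving from = 0x5000 and to = 0x6000; all
   4096 opcodes in that range are scanned and the ones satisfying
   (op & 0xf0f8) == 0x50c0 get disas_scc installed.  */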

/* Register m68k opcode handlers.  Order is important.
   Later insns override earlier ones.  */
void register_m68k_insns (CPUM68KState *env)
{
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        register_opcode(disas_##name, 0x##opcode, 0x##mask); \
    } while(0)
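    /* For example, INSN(eor, b180, f1c0, CF_ISA_A) below expands to a guarded
       call that registers disas_eor for every opcode matching 0xb180 under
       mask 0xf1c0, but only when the CPU advertises the CF_ISA_A feature.  */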
    INSN(undef,     0000, 0000, CF_ISA_A);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
    INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(bitop_im,  0800, ffc0, CF_ISA_A);
    INSN(bitop_im,  0840, ffc0, CF_ISA_A);
    INSN(bitop_im,  0880, ffc0, CF_ISA_A);
    INSN(bitop_im,  08c0, ffc0, CF_ISA_A);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(move,      1000, f000, CF_ISA_A);
    INSN(move,      2000, f000, CF_ISA_A);
    INSN(move,      3000, f000, CF_ISA_A);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(lea,       41c0, f1c0, CF_ISA_A);
    INSN(clr,       4200, ff00, CF_ISA_A);
    INSN(undef,     42c0, ffc0, CF_ISA_A);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
    INSN(pea,       4840, ffc0, CF_ISA_A);
    INSN(swap,      4840, fff8, CF_ISA_A);
    INSN(movem,     48c0, fbc0, CF_ISA_A);
    INSN(ext,       4880, fff8, CF_ISA_A);
    INSN(ext,       48c0, fff8, CF_ISA_A);
    INSN(ext,       49c0, fff8, CF_ISA_A);
    INSN(tst,       4a00, ff00, CF_ISA_A);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(halt,      4ac8, ffff, CF_ISA_A);
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    INSN(illegal,   4afc, ffff, CF_ISA_A);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    INSN(trap,      4e40, fff0, CF_ISA_A);
    INSN(link,      4e50, fff8, CF_ISA_A);
    INSN(unlk,      4e58, fff8, CF_ISA_A);
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(nop,       4e71, ffff, CF_ISA_A);
    INSN(stop,      4e72, ffff, CF_ISA_A);
    INSN(rte,       4e73, ffff, CF_ISA_A);
    INSN(rts,       4e75, ffff, CF_ISA_A);
    INSN(movec,     4e7b, ffff, CF_ISA_A);
    INSN(jump,      4e80, ffc0, CF_ISA_A);
    INSN(jump,      4ec0, ffc0, CF_ISA_A);
    INSN(addsubq,   5180, f1c0, CF_ISA_A);
    INSN(scc,       50c0, f0f8, CF_ISA_A);
    INSN(addsubq,   5080, f1c0, CF_ISA_A);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    INSN(branch,    6000, f000, CF_ISA_A);
    /* Disable long branch instructions, then add back the ones we want.  */
    INSN(undef,     60ff, f0ff, CF_ISA_A); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);

    INSN(moveq,     7000, f100, CF_ISA_A);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    INSN(or,        8000, f000, CF_ISA_A);
    INSN(divw,      80c0, f0c0, CF_ISA_A);
    INSN(addsub,    9000, f000, CF_ISA_A);
    INSN(subx,      9180, f1f8, CF_ISA_A);
    INSN(suba,      91c0, f1c0, CF_ISA_A);

    INSN(undef_mac, a000, f000, CF_ISA_A);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    INSN(and,       c000, f000, CF_ISA_A);
    INSN(mulw,      c0c0, f0c0, CF_ISA_A);
    INSN(addsub,    d000, f000, CF_ISA_A);
    INSN(addx,      d180, f1f8, CF_ISA_A);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(undef_fpu, f000, f000, CF_ISA_A);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f340, ffc0, CF_FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#undef INSN
}

/* ??? Some of this implementation is not exception safe.  We should always
   write back the result to memory before setting the condition codes.  */
static void disas_m68k_insn(CPUState * env, DisasContext *s)
{
    uint16_t insn;

    insn = lduw_code(s->pc);
    s->pc += 2;

    opcode_table[insn](s, insn);
}
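
/* Note (added): dispatch is a direct lookup in the 64K-entry opcode_table
   filled by register_m68k_insns(); every 16-bit opcode value has an entry
   because the table is seeded with disas_undef via the all-zero mask
   registration at the top of that function.  */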

/* generate intermediate code for basic block 'tb'.  */
static inline void
gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
                               int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj;
    target_ulong pc_start;
    int pc_offset;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;

    dc->tb = tb;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->env = env;
    dc->is_jmp = DISAS_NEXT;
    dc->pc = pc_start;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->fpcr = env->fpcr;
    dc->user = (env->sr & SR_S) == 0;
    dc->is_mem = 0;
    dc->done_mac = 0;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    do {
        pc_offset = dc->pc - pc_start;
        gen_throws_exception = NULL;
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    gen_exception(dc, dc->pc, EXCP_DEBUG);
                    dc->is_jmp = DISAS_JUMP;
                    break;
                }
            }
            if (dc->is_jmp)
                break;
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = dc->pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        dc->insn_pc = dc->pc;
        disas_m68k_insn(env, dc);
        num_insns++;
    } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
             !env->singlestep_enabled &&
             !singlestep &&
             (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (unlikely(env->singlestep_enabled)) {
        /* Make sure the pc is updated, and raise a debug exception.  */
        if (!dc->is_jmp) {
            gen_flush_cc_op(dc);
            tcg_gen_movi_i32(QREG_PC, dc->pc);
        }
        gen_helper_raise_exception(tcg_const_i32(EXCP_DEBUG));
    } else {
        switch(dc->is_jmp) {
        case DISAS_NEXT:
            gen_flush_cc_op(dc);
            gen_jmp_tb(dc, 0, dc->pc);
            break;
        default:
        case DISAS_JUMP:
        case DISAS_UPDATE:
            gen_flush_cc_op(dc);
            /* indicate that the hash table must be used to find the next TB */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* nothing more to generate */
            break;
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("----------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, dc->pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = dc->pc - pc_start;
        tb->icount = num_insns;
    }

    //optimize_flags();
    //expand_target_qops();
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void cpu_dump_state(CPUState *env, FILE *f, fprintf_function cpu_fprintf,
                    int flags)
{
    int i;
    uint16_t sr;
    CPU_DoubleU u;
    for (i = 0; i < 8; i++)
      {
        u.d = env->fregs[i];
        cpu_fprintf (f, "D%d = %08x   A%d = %08x   F%d = %08x%08x (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, u.l.upper, u.l.lower, *(double *)&u.d);
      }
    cpu_fprintf (f, "PC = %08x   ", env->pc);
    sr = env->sr;
    cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
    cpu_fprintf (f, "FPRESULT = %12g\n", *(double *)&env->fp_result);
}

void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    env->pc = gen_opc_pc[pc_pos];
}