Statistics
| Branch: | Revision:

root / target-m68k / translate.c @ 56aebc89

History | View | Annotate | Download (78.5 kB)

1
/*
2
 *  m68k translation
3
 *
4
 *  Copyright (c) 2005-2007 CodeSourcery
5
 *  Written by Paul Brook
6
 *
7
 * This library is free software; you can redistribute it and/or
8
 * modify it under the terms of the GNU Lesser General Public
9
 * License as published by the Free Software Foundation; either
10
 * version 2 of the License, or (at your option) any later version.
11
 *
12
 * This library is distributed in the hope that it will be useful,
13
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
 * General Public License for more details.
16
 *
17
 * You should have received a copy of the GNU Lesser General Public
18
 * License along with this library; if not, write to the Free Software
19
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
 */
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26
#include <assert.h>
27

    
28
#include "config.h"
29
#include "cpu.h"
30
#include "exec-all.h"
31
#include "disas.h"
32
#include "tcg-op.h"
33
#include "qemu-log.h"
34

    
35
#define GEN_HELPER 1
36
#include "helpers.h"
37

    
38
//#define DEBUG_DISPATCH 1
39

    
40
/* Fake floating point.  */
41
#define TCG_TYPE_F32 TCG_TYPE_I32
42
#define TCG_TYPE_F64 TCG_TYPE_I64
43
#define tcg_gen_mov_f64 tcg_gen_mov_i64
44
#define tcg_gen_qemu_ldf32 tcg_gen_qemu_ld32u
45
#define tcg_gen_qemu_ldf64 tcg_gen_qemu_ld64
46
#define tcg_gen_qemu_stf32 tcg_gen_qemu_st32
47
#define tcg_gen_qemu_stf64 tcg_gen_qemu_st64
48
#define gen_helper_pack_32_f32 tcg_gen_mov_i32
49
#define gen_helper_pack_f32_32 tcg_gen_mov_i32
50

    
51
#define DEFO32(name, offset) static TCGv QREG_##name;
52
#define DEFO64(name, offset) static TCGv QREG_##name;
53
#define DEFF64(name, offset) static TCGv QREG_##name;
54
#include "qregs.def"
55
#undef DEFO32
56
#undef DEFO64
57
#undef DEFF64
58

    
59
static TCGv cpu_env;
60

    
61
static char cpu_reg_names[3*8*3 + 5*4];
62
static TCGv cpu_dregs[8];
63
static TCGv cpu_aregs[8];
64
static TCGv cpu_fregs[8];
65
static TCGv cpu_macc[4];
66

    
67
#define DREG(insn, pos) cpu_dregs[((insn) >> (pos)) & 7]
68
#define AREG(insn, pos) cpu_aregs[((insn) >> (pos)) & 7]
69
#define FREG(insn, pos) cpu_fregs[((insn) >> (pos)) & 7]
70
#define MACREG(acc) cpu_macc[acc]
71
#define QREG_SP cpu_aregs[7]
72

    
73
static TCGv NULL_QREG;
74
#define IS_NULL_QREG(t) (GET_TCGV(t) == GET_TCGV(NULL_QREG))
75
/* Used to distinguish stores from bad addressing modes.  */
76
static TCGv store_dummy;
77

    
78
#include "gen-icount.h"
79

    
80
void m68k_tcg_init(void)
81
{
82
    char *p;
83
    int i;
84

    
85
#define DEFO32(name,  offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, offsetof(CPUState, offset), #name);
86
#define DEFO64(name,  offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0, offsetof(CPUState, offset), #name);
87
#define DEFF64(name,  offset) DEFO64(name, offset)
88
#include "qregs.def"
89
#undef DEFO32
90
#undef DEFO64
91
#undef DEFF64
92

    
93
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
94

    
95
    p = cpu_reg_names;
96
    for (i = 0; i < 8; i++) {
97
        sprintf(p, "D%d", i);
98
        cpu_dregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
99
                                          offsetof(CPUM68KState, dregs[i]), p);
100
        p += 3;
101
        sprintf(p, "A%d", i);
102
        cpu_aregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
103
                                          offsetof(CPUM68KState, aregs[i]), p);
104
        p += 3;
105
        sprintf(p, "F%d", i);
106
        cpu_fregs[i] = tcg_global_mem_new(TCG_TYPE_F64, TCG_AREG0,
107
                                          offsetof(CPUM68KState, fregs[i]), p);
108
        p += 3;
109
    }
110
    for (i = 0; i < 4; i++) {
111
        sprintf(p, "ACC%d", i);
112
        cpu_macc[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
113
                                         offsetof(CPUM68KState, macc[i]), p);
114
        p += 5;
115
    }
116

    
117
    NULL_QREG = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -4, "NULL");
118
    store_dummy = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -8, "NULL");
119

    
120
#define DEF_HELPER(name, ret, args) \
121
    tcg_register_helper(HELPER(name), #name);
122
#include "helpers.h"
123
}
124

    
125
static inline void qemu_assert(int cond, const char *msg)
126
{
127
    if (!cond) {
128
        fprintf (stderr, "badness: %s\n", msg);
129
        abort();
130
    }
131
}
132

    
133
/* internal defines */
134
typedef struct DisasContext {
135
    CPUM68KState *env;
136
    target_ulong insn_pc; /* Start of the current instruction.  */
137
    target_ulong pc;
138
    int is_jmp;
139
    int cc_op;
140
    int user;
141
    uint32_t fpcr;
142
    struct TranslationBlock *tb;
143
    int singlestep_enabled;
144
    int is_mem;
145
    TCGv mactmp;
146
} DisasContext;
147

    
148
#define DISAS_JUMP_NEXT 4
149

    
150
#if defined(CONFIG_USER_ONLY)
151
#define IS_USER(s) 1
152
#else
153
#define IS_USER(s) s->user
154
#endif
155

    
156
/* XXX: move that elsewhere */
157
/* ??? Fix exceptions.  */
158
static void *gen_throws_exception;
159
#define gen_last_qop NULL
160

    
161
#define OS_BYTE 0
162
#define OS_WORD 1
163
#define OS_LONG 2
164
#define OS_SINGLE 4
165
#define OS_DOUBLE 5
166

    
167
typedef void (*disas_proc)(DisasContext *, uint16_t);
168

    
169
#ifdef DEBUG_DISPATCH
170
#define DISAS_INSN(name) \
171
  static void real_disas_##name (DisasContext *s, uint16_t insn); \
172
  static void disas_##name (DisasContext *s, uint16_t insn) { \
173
    if (logfile) fprintf(logfile, "Dispatch " #name "\n"); \
174
    real_disas_##name(s, insn); } \
175
  static void real_disas_##name (DisasContext *s, uint16_t insn)
176
#else
177
#define DISAS_INSN(name) \
178
  static void disas_##name (DisasContext *s, uint16_t insn)
179
#endif
180

    
181
/* FIXME: Remove this.  */
182
#define gen_im32(val) tcg_const_i32(val)
183

    
184
#define QMODE_I32 TCG_TYPE_I32
185
#define QMODE_I64 TCG_TYPE_I64
186
#define QMODE_F32 TCG_TYPE_F32
187
#define QMODE_F64 TCG_TYPE_F64
188
static inline TCGv gen_new_qreg(int mode)
189
{
190
    return tcg_temp_new(mode);
191
}
192

    
193
/* Generate a load from the specified address.  Narrow values are
194
   sign extended to full register width.  */
195
static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
196
{
197
    TCGv tmp;
198
    int index = IS_USER(s);
199
    s->is_mem = 1;
200
    switch(opsize) {
201
    case OS_BYTE:
202
        tmp = gen_new_qreg(QMODE_I32);
203
        if (sign)
204
            tcg_gen_qemu_ld8s(tmp, addr, index);
205
        else
206
            tcg_gen_qemu_ld8u(tmp, addr, index);
207
        break;
208
    case OS_WORD:
209
        tmp = gen_new_qreg(QMODE_I32);
210
        if (sign)
211
            tcg_gen_qemu_ld16s(tmp, addr, index);
212
        else
213
            tcg_gen_qemu_ld16u(tmp, addr, index);
214
        break;
215
    case OS_LONG:
216
        tmp = gen_new_qreg(QMODE_I32);
217
        tcg_gen_qemu_ld32u(tmp, addr, index);
218
        break;
219
    case OS_SINGLE:
220
        tmp = gen_new_qreg(QMODE_F32);
221
        tcg_gen_qemu_ldf32(tmp, addr, index);
222
        break;
223
    case OS_DOUBLE:
224
        tmp  = gen_new_qreg(QMODE_F64);
225
        tcg_gen_qemu_ldf64(tmp, addr, index);
226
        break;
227
    default:
228
        qemu_assert(0, "bad load size");
229
    }
230
    gen_throws_exception = gen_last_qop;
231
    return tmp;
232
}
233

    
234
/* Generate a store.  */
235
static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
236
{
237
    int index = IS_USER(s);
238
    s->is_mem = 1;
239
    switch(opsize) {
240
    case OS_BYTE:
241
        tcg_gen_qemu_st8(val, addr, index);
242
        break;
243
    case OS_WORD:
244
        tcg_gen_qemu_st16(val, addr, index);
245
        break;
246
    case OS_LONG:
247
        tcg_gen_qemu_st32(val, addr, index);
248
        break;
249
    case OS_SINGLE:
250
        tcg_gen_qemu_stf32(val, addr, index);
251
        break;
252
    case OS_DOUBLE:
253
        tcg_gen_qemu_stf64(val, addr, index);
254
        break;
255
    default:
256
        qemu_assert(0, "bad store size");
257
    }
258
    gen_throws_exception = gen_last_qop;
259
}
260

    
261
typedef enum {
262
    EA_STORE,
263
    EA_LOADU,
264
    EA_LOADS
265
} ea_what;
266

    
267
/* Generate an unsigned load if VAL is 0 a signed load if val is -1,
268
   otherwise generate a store.  */
269
static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
270
                     ea_what what)
271
{
272
    if (what == EA_STORE) {
273
        gen_store(s, opsize, addr, val);
274
        return store_dummy;
275
    } else {
276
        return gen_load(s, opsize, addr, what == EA_LOADS);
277
    }
278
}
279

    
280
/* Read a 32-bit immediate constant.  */
281
static inline uint32_t read_im32(DisasContext *s)
282
{
283
    uint32_t im;
284
    im = ((uint32_t)lduw_code(s->pc)) << 16;
285
    s->pc += 2;
286
    im |= lduw_code(s->pc);
287
    s->pc += 2;
288
    return im;
289
}
290

    
291
/* Calculate and address index.  */
292
static TCGv gen_addr_index(uint16_t ext, TCGv tmp)
293
{
294
    TCGv add;
295
    int scale;
296

    
297
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
298
    if ((ext & 0x800) == 0) {
299
        tcg_gen_ext16s_i32(tmp, add);
300
        add = tmp;
301
    }
302
    scale = (ext >> 9) & 3;
303
    if (scale != 0) {
304
        tcg_gen_shli_i32(tmp, add, scale);
305
        add = tmp;
306
    }
307
    return add;
308
}
309

    
310
/* Handle a base + index + displacement effective addresss.
311
   A NULL_QREG base means pc-relative.  */
312
static TCGv gen_lea_indexed(DisasContext *s, int opsize, TCGv base)
313
{
314
    uint32_t offset;
315
    uint16_t ext;
316
    TCGv add;
317
    TCGv tmp;
318
    uint32_t bd, od;
319

    
320
    offset = s->pc;
321
    ext = lduw_code(s->pc);
322
    s->pc += 2;
323

    
324
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
325
        return NULL_QREG;
326

    
327
    if (ext & 0x100) {
328
        /* full extension word format */
329
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
330
            return NULL_QREG;
331

    
332
        if ((ext & 0x30) > 0x10) {
333
            /* base displacement */
334
            if ((ext & 0x30) == 0x20) {
335
                bd = (int16_t)lduw_code(s->pc);
336
                s->pc += 2;
337
            } else {
338
                bd = read_im32(s);
339
            }
340
        } else {
341
            bd = 0;
342
        }
343
        tmp = gen_new_qreg(QMODE_I32);
344
        if ((ext & 0x44) == 0) {
345
            /* pre-index */
346
            add = gen_addr_index(ext, tmp);
347
        } else {
348
            add = NULL_QREG;
349
        }
350
        if ((ext & 0x80) == 0) {
351
            /* base not suppressed */
352
            if (IS_NULL_QREG(base)) {
353
                base = gen_im32(offset + bd);
354
                bd = 0;
355
            }
356
            if (!IS_NULL_QREG(add)) {
357
                tcg_gen_add_i32(tmp, add, base);
358
                add = tmp;
359
            } else {
360
                add = base;
361
            }
362
        }
363
        if (!IS_NULL_QREG(add)) {
364
            if (bd != 0) {
365
                tcg_gen_addi_i32(tmp, add, bd);
366
                add = tmp;
367
            }
368
        } else {
369
            add = gen_im32(bd);
370
        }
371
        if ((ext & 3) != 0) {
372
            /* memory indirect */
373
            base = gen_load(s, OS_LONG, add, 0);
374
            if ((ext & 0x44) == 4) {
375
                add = gen_addr_index(ext, tmp);
376
                tcg_gen_add_i32(tmp, add, base);
377
                add = tmp;
378
            } else {
379
                add = base;
380
            }
381
            if ((ext & 3) > 1) {
382
                /* outer displacement */
383
                if ((ext & 3) == 2) {
384
                    od = (int16_t)lduw_code(s->pc);
385
                    s->pc += 2;
386
                } else {
387
                    od = read_im32(s);
388
                }
389
            } else {
390
                od = 0;
391
            }
392
            if (od != 0) {
393
                tcg_gen_addi_i32(tmp, add, od);
394
                add = tmp;
395
            }
396
        }
397
    } else {
398
        /* brief extension word format */
399
        tmp = gen_new_qreg(QMODE_I32);
400
        add = gen_addr_index(ext, tmp);
401
        if (!IS_NULL_QREG(base)) {
402
            tcg_gen_add_i32(tmp, add, base);
403
            if ((int8_t)ext)
404
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
405
        } else {
406
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
407
        }
408
        add = tmp;
409
    }
410
    return add;
411
}
412

    
413
/* Update the CPU env CC_OP state.  */
414
static inline void gen_flush_cc_op(DisasContext *s)
415
{
416
    if (s->cc_op != CC_OP_DYNAMIC)
417
        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
418
}
419

    
420
/* Evaluate all the CC flags.  */
421
static inline void gen_flush_flags(DisasContext *s)
422
{
423
    if (s->cc_op == CC_OP_FLAGS)
424
        return;
425
    gen_flush_cc_op(s);
426
    gen_helper_flush_flags(cpu_env, QREG_CC_OP);
427
    s->cc_op = CC_OP_FLAGS;
428
}
429

    
430
static void gen_logic_cc(DisasContext *s, TCGv val)
431
{
432
    tcg_gen_mov_i32(QREG_CC_DEST, val);
433
    s->cc_op = CC_OP_LOGIC;
434
}
435

    
436
static void gen_update_cc_add(TCGv dest, TCGv src)
437
{
438
    tcg_gen_mov_i32(QREG_CC_DEST, dest);
439
    tcg_gen_mov_i32(QREG_CC_SRC, src);
440
}
441

    
442
static inline int opsize_bytes(int opsize)
443
{
444
    switch (opsize) {
445
    case OS_BYTE: return 1;
446
    case OS_WORD: return 2;
447
    case OS_LONG: return 4;
448
    case OS_SINGLE: return 4;
449
    case OS_DOUBLE: return 8;
450
    default:
451
        qemu_assert(0, "bad operand size");
452
        return 0;
453
    }
454
}
455

    
456
/* Assign value to a register.  If the width is less than the register width
457
   only the low part of the register is set.  */
458
static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
459
{
460
    TCGv tmp;
461
    switch (opsize) {
462
    case OS_BYTE:
463
        tcg_gen_andi_i32(reg, reg, 0xffffff00);
464
        tmp = gen_new_qreg(QMODE_I32);
465
        tcg_gen_ext8u_i32(tmp, val);
466
        tcg_gen_or_i32(reg, reg, tmp);
467
        break;
468
    case OS_WORD:
469
        tcg_gen_andi_i32(reg, reg, 0xffff0000);
470
        tmp = gen_new_qreg(QMODE_I32);
471
        tcg_gen_ext16u_i32(tmp, val);
472
        tcg_gen_or_i32(reg, reg, tmp);
473
        break;
474
    case OS_LONG:
475
        tcg_gen_mov_i32(reg, val);
476
        break;
477
    case OS_SINGLE:
478
        gen_helper_pack_32_f32(reg, val);
479
        break;
480
    default:
481
        qemu_assert(0, "Bad operand size");
482
        break;
483
    }
484
}
485

    
486
/* Sign or zero extend a value.  */
487
static inline TCGv gen_extend(TCGv val, int opsize, int sign)
488
{
489
    TCGv tmp;
490

    
491
    switch (opsize) {
492
    case OS_BYTE:
493
        tmp = gen_new_qreg(QMODE_I32);
494
        if (sign)
495
            tcg_gen_ext8s_i32(tmp, val);
496
        else
497
            tcg_gen_ext8u_i32(tmp, val);
498
        break;
499
    case OS_WORD:
500
        tmp = gen_new_qreg(QMODE_I32);
501
        if (sign)
502
            tcg_gen_ext16s_i32(tmp, val);
503
        else
504
            tcg_gen_ext16u_i32(tmp, val);
505
        break;
506
    case OS_LONG:
507
        tmp = val;
508
        break;
509
    case OS_SINGLE:
510
        tmp = gen_new_qreg(QMODE_F32);
511
        gen_helper_pack_f32_32(tmp, val);
512
        break;
513
    default:
514
        qemu_assert(0, "Bad operand size");
515
    }
516
    return tmp;
517
}
518

    
519
/* Generate code for an "effective address".  Does not adjust the base
520
   register for autoincrememnt addressing modes.  */
521
static TCGv gen_lea(DisasContext *s, uint16_t insn, int opsize)
522
{
523
    TCGv reg;
524
    TCGv tmp;
525
    uint16_t ext;
526
    uint32_t offset;
527

    
528
    switch ((insn >> 3) & 7) {
529
    case 0: /* Data register direct.  */
530
    case 1: /* Address register direct.  */
531
        return NULL_QREG;
532
    case 2: /* Indirect register */
533
    case 3: /* Indirect postincrement.  */
534
        return AREG(insn, 0);
535
    case 4: /* Indirect predecrememnt.  */
536
        reg = AREG(insn, 0);
537
        tmp = gen_new_qreg(QMODE_I32);
538
        tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
539
        return tmp;
540
    case 5: /* Indirect displacement.  */
541
        reg = AREG(insn, 0);
542
        tmp = gen_new_qreg(QMODE_I32);
543
        ext = lduw_code(s->pc);
544
        s->pc += 2;
545
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
546
        return tmp;
547
    case 6: /* Indirect index + displacement.  */
548
        reg = AREG(insn, 0);
549
        return gen_lea_indexed(s, opsize, reg);
550
    case 7: /* Other */
551
        switch (insn & 7) {
552
        case 0: /* Absolute short.  */
553
            offset = ldsw_code(s->pc);
554
            s->pc += 2;
555
            return gen_im32(offset);
556
        case 1: /* Absolute long.  */
557
            offset = read_im32(s);
558
            return gen_im32(offset);
559
        case 2: /* pc displacement  */
560
            tmp = gen_new_qreg(QMODE_I32);
561
            offset = s->pc;
562
            offset += ldsw_code(s->pc);
563
            s->pc += 2;
564
            return gen_im32(offset);
565
        case 3: /* pc index+displacement.  */
566
            return gen_lea_indexed(s, opsize, NULL_QREG);
567
        case 4: /* Immediate.  */
568
        default:
569
            return NULL_QREG;
570
        }
571
    }
572
    /* Should never happen.  */
573
    return NULL_QREG;
574
}
575

    
576
/* Helper function for gen_ea. Reuse the computed address between the
577
   for read/write operands.  */
578
static inline TCGv gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
579
                              TCGv val, TCGv *addrp, ea_what what)
580
{
581
    TCGv tmp;
582

    
583
    if (addrp && what == EA_STORE) {
584
        tmp = *addrp;
585
    } else {
586
        tmp = gen_lea(s, insn, opsize);
587
        if (IS_NULL_QREG(tmp))
588
            return tmp;
589
        if (addrp)
590
            *addrp = tmp;
591
    }
592
    return gen_ldst(s, opsize, tmp, val, what);
593
}
594

    
595
/* Generate code to load/store a value ito/from an EA.  If VAL > 0 this is
596
   a write otherwise it is a read (0 == sign extend, -1 == zero extend).
597
   ADDRP is non-null for readwrite operands.  */
598
static TCGv gen_ea(DisasContext *s, uint16_t insn, int opsize, TCGv val,
599
                   TCGv *addrp, ea_what what)
600
{
601
    TCGv reg;
602
    TCGv result;
603
    uint32_t offset;
604

    
605
    switch ((insn >> 3) & 7) {
606
    case 0: /* Data register direct.  */
607
        reg = DREG(insn, 0);
608
        if (what == EA_STORE) {
609
            gen_partset_reg(opsize, reg, val);
610
            return store_dummy;
611
        } else {
612
            return gen_extend(reg, opsize, what == EA_LOADS);
613
        }
614
    case 1: /* Address register direct.  */
615
        reg = AREG(insn, 0);
616
        if (what == EA_STORE) {
617
            tcg_gen_mov_i32(reg, val);
618
            return store_dummy;
619
        } else {
620
            return gen_extend(reg, opsize, what == EA_LOADS);
621
        }
622
    case 2: /* Indirect register */
623
        reg = AREG(insn, 0);
624
        return gen_ldst(s, opsize, reg, val, what);
625
    case 3: /* Indirect postincrement.  */
626
        reg = AREG(insn, 0);
627
        result = gen_ldst(s, opsize, reg, val, what);
628
        /* ??? This is not exception safe.  The instruction may still
629
           fault after this point.  */
630
        if (what == EA_STORE || !addrp)
631
            tcg_gen_addi_i32(reg, reg, opsize_bytes(opsize));
632
        return result;
633
    case 4: /* Indirect predecrememnt.  */
634
        {
635
            TCGv tmp;
636
            if (addrp && what == EA_STORE) {
637
                tmp = *addrp;
638
            } else {
639
                tmp = gen_lea(s, insn, opsize);
640
                if (IS_NULL_QREG(tmp))
641
                    return tmp;
642
                if (addrp)
643
                    *addrp = tmp;
644
            }
645
            result = gen_ldst(s, opsize, tmp, val, what);
646
            /* ??? This is not exception safe.  The instruction may still
647
               fault after this point.  */
648
            if (what == EA_STORE || !addrp) {
649
                reg = AREG(insn, 0);
650
                tcg_gen_mov_i32(reg, tmp);
651
            }
652
        }
653
        return result;
654
    case 5: /* Indirect displacement.  */
655
    case 6: /* Indirect index + displacement.  */
656
        return gen_ea_once(s, insn, opsize, val, addrp, what);
657
    case 7: /* Other */
658
        switch (insn & 7) {
659
        case 0: /* Absolute short.  */
660
        case 1: /* Absolute long.  */
661
        case 2: /* pc displacement  */
662
        case 3: /* pc index+displacement.  */
663
            return gen_ea_once(s, insn, opsize, val, addrp, what);
664
        case 4: /* Immediate.  */
665
            /* Sign extend values for consistency.  */
666
            switch (opsize) {
667
            case OS_BYTE:
668
                if (what == EA_LOADS)
669
                    offset = ldsb_code(s->pc + 1);
670
                else
671
                    offset = ldub_code(s->pc + 1);
672
                s->pc += 2;
673
                break;
674
            case OS_WORD:
675
                if (what == EA_LOADS)
676
                    offset = ldsw_code(s->pc);
677
                else
678
                    offset = lduw_code(s->pc);
679
                s->pc += 2;
680
                break;
681
            case OS_LONG:
682
                offset = read_im32(s);
683
                break;
684
            default:
685
                qemu_assert(0, "Bad immediate operand");
686
            }
687
            return tcg_const_i32(offset);
688
        default:
689
            return NULL_QREG;
690
        }
691
    }
692
    /* Should never happen.  */
693
    return NULL_QREG;
694
}
695

    
696
/* This generates a conditional branch, clobbering all temporaries.  */
697
static void gen_jmpcc(DisasContext *s, int cond, int l1)
698
{
699
    TCGv tmp;
700

    
701
    /* TODO: Optimize compare/branch pairs rather than always flushing
702
       flag state to CC_OP_FLAGS.  */
703
    gen_flush_flags(s);
704
    switch (cond) {
705
    case 0: /* T */
706
        tcg_gen_br(l1);
707
        break;
708
    case 1: /* F */
709
        break;
710
    case 2: /* HI (!C && !Z) */
711
        tmp = gen_new_qreg(QMODE_I32);
712
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
713
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
714
        break;
715
    case 3: /* LS (C || Z) */
716
        tmp = gen_new_qreg(QMODE_I32);
717
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
718
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
719
        break;
720
    case 4: /* CC (!C) */
721
        tmp = gen_new_qreg(QMODE_I32);
722
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
723
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
724
        break;
725
    case 5: /* CS (C) */
726
        tmp = gen_new_qreg(QMODE_I32);
727
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
728
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
729
        break;
730
    case 6: /* NE (!Z) */
731
        tmp = gen_new_qreg(QMODE_I32);
732
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
733
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
734
        break;
735
    case 7: /* EQ (Z) */
736
        tmp = gen_new_qreg(QMODE_I32);
737
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
738
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
739
        break;
740
    case 8: /* VC (!V) */
741
        tmp = gen_new_qreg(QMODE_I32);
742
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
743
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
744
        break;
745
    case 9: /* VS (V) */
746
        tmp = gen_new_qreg(QMODE_I32);
747
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
748
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
749
        break;
750
    case 10: /* PL (!N) */
751
        tmp = gen_new_qreg(QMODE_I32);
752
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
753
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
754
        break;
755
    case 11: /* MI (N) */
756
        tmp = gen_new_qreg(QMODE_I32);
757
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
758
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
759
        break;
760
    case 12: /* GE (!(N ^ V)) */
761
        tmp = gen_new_qreg(QMODE_I32);
762
        assert(CCF_V == (CCF_N >> 2));
763
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
764
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
765
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
766
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
767
        break;
768
    case 13: /* LT (N ^ V) */
769
        tmp = gen_new_qreg(QMODE_I32);
770
        assert(CCF_V == (CCF_N >> 2));
771
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
772
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
773
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
774
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
775
        break;
776
    case 14: /* GT (!(Z || (N ^ V))) */
777
        tmp = gen_new_qreg(QMODE_I32);
778
        assert(CCF_V == (CCF_N >> 2));
779
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
780
        tcg_gen_shri_i32(tmp, tmp, 2);
781
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
782
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
783
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
784
        break;
785
    case 15: /* LE (Z || (N ^ V)) */
786
        tmp = gen_new_qreg(QMODE_I32);
787
        assert(CCF_V == (CCF_N >> 2));
788
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
789
        tcg_gen_shri_i32(tmp, tmp, 2);
790
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
791
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
792
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
793
        break;
794
    default:
795
        /* Should ever happen.  */
796
        abort();
797
    }
798
}
799

    
800
DISAS_INSN(scc)
801
{
802
    int l1;
803
    int cond;
804
    TCGv reg;
805

    
806
    l1 = gen_new_label();
807
    cond = (insn >> 8) & 0xf;
808
    reg = DREG(insn, 0);
809
    tcg_gen_andi_i32(reg, reg, 0xffffff00);
810
    /* This is safe because we modify the reg directly, with no other values
811
       live.  */
812
    gen_jmpcc(s, cond ^ 1, l1);
813
    tcg_gen_ori_i32(reg, reg, 0xff);
814
    gen_set_label(l1);
815
}
816

    
817
/* Force a TB lookup after an instruction that changes the CPU state.  */
818
static void gen_lookup_tb(DisasContext *s)
819
{
820
    gen_flush_cc_op(s);
821
    tcg_gen_movi_i32(QREG_PC, s->pc);
822
    s->is_jmp = DISAS_UPDATE;
823
}
824

    
825
/* Generate a jump to an immediate address.  */
826
static void gen_jmp_im(DisasContext *s, uint32_t dest)
827
{
828
    gen_flush_cc_op(s);
829
    tcg_gen_movi_i32(QREG_PC, dest);
830
    s->is_jmp = DISAS_JUMP;
831
}
832

    
833
/* Generate a jump to the address in qreg DEST.  */
834
static void gen_jmp(DisasContext *s, TCGv dest)
835
{
836
    gen_flush_cc_op(s);
837
    tcg_gen_mov_i32(QREG_PC, dest);
838
    s->is_jmp = DISAS_JUMP;
839
}
840

    
841
static void gen_exception(DisasContext *s, uint32_t where, int nr)
842
{
843
    gen_flush_cc_op(s);
844
    gen_jmp_im(s, where);
845
    gen_helper_raise_exception(tcg_const_i32(nr));
846
}
847

    
848
static inline void gen_addr_fault(DisasContext *s)
849
{
850
    gen_exception(s, s->insn_pc, EXCP_ADDRESS);
851
}
852

    
853
#define SRC_EA(result, opsize, op_sign, addrp) do { \
854
    result = gen_ea(s, insn, opsize, NULL_QREG, addrp, op_sign ? EA_LOADS : EA_LOADU); \
855
    if (IS_NULL_QREG(result)) { \
856
        gen_addr_fault(s); \
857
        return; \
858
    } \
859
    } while (0)
860

    
861
#define DEST_EA(insn, opsize, val, addrp) do { \
862
    TCGv ea_result = gen_ea(s, insn, opsize, val, addrp, EA_STORE); \
863
    if (IS_NULL_QREG(ea_result)) { \
864
        gen_addr_fault(s); \
865
        return; \
866
    } \
867
    } while (0)
868

    
869
/* Generate a jump to an immediate address.  */
870
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
871
{
872
    TranslationBlock *tb;
873

    
874
    tb = s->tb;
875
    if (unlikely(s->singlestep_enabled)) {
876
        gen_exception(s, dest, EXCP_DEBUG);
877
    } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
878
               (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
879
        tcg_gen_goto_tb(n);
880
        tcg_gen_movi_i32(QREG_PC, dest);
881
        tcg_gen_exit_tb((long)tb + n);
882
    } else {
883
        gen_jmp_im(s, dest);
884
        tcg_gen_exit_tb(0);
885
    }
886
    s->is_jmp = DISAS_TB_JUMP;
887
}
888

    
889
DISAS_INSN(undef_mac)
890
{
891
    gen_exception(s, s->pc - 2, EXCP_LINEA);
892
}
893

    
894
DISAS_INSN(undef_fpu)
895
{
896
    gen_exception(s, s->pc - 2, EXCP_LINEF);
897
}
898

    
899
DISAS_INSN(undef)
900
{
901
    gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
902
    cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
903
              insn, s->pc - 2);
904
}
905

    
906
DISAS_INSN(mulw)
907
{
908
    TCGv reg;
909
    TCGv tmp;
910
    TCGv src;
911
    int sign;
912

    
913
    sign = (insn & 0x100) != 0;
914
    reg = DREG(insn, 9);
915
    tmp = gen_new_qreg(QMODE_I32);
916
    if (sign)
917
        tcg_gen_ext16s_i32(tmp, reg);
918
    else
919
        tcg_gen_ext16u_i32(tmp, reg);
920
    SRC_EA(src, OS_WORD, sign, NULL);
921
    tcg_gen_mul_i32(tmp, tmp, src);
922
    tcg_gen_mov_i32(reg, tmp);
923
    /* Unlike m68k, coldfire always clears the overflow bit.  */
924
    gen_logic_cc(s, tmp);
925
}
926

    
927
DISAS_INSN(divw)
928
{
929
    TCGv reg;
930
    TCGv tmp;
931
    TCGv src;
932
    int sign;
933

    
934
    sign = (insn & 0x100) != 0;
935
    reg = DREG(insn, 9);
936
    if (sign) {
937
        tcg_gen_ext16s_i32(QREG_DIV1, reg);
938
    } else {
939
        tcg_gen_ext16u_i32(QREG_DIV1, reg);
940
    }
941
    SRC_EA(src, OS_WORD, sign, NULL);
942
    tcg_gen_mov_i32(QREG_DIV2, src);
943
    if (sign) {
944
        gen_helper_divs(cpu_env, tcg_const_i32(1));
945
    } else {
946
        gen_helper_divu(cpu_env, tcg_const_i32(1));
947
    }
948

    
949
    tmp = gen_new_qreg(QMODE_I32);
950
    src = gen_new_qreg(QMODE_I32);
951
    tcg_gen_ext16u_i32(tmp, QREG_DIV1);
952
    tcg_gen_shli_i32(src, QREG_DIV2, 16);
953
    tcg_gen_or_i32(reg, tmp, src);
954
    s->cc_op = CC_OP_FLAGS;
955
}
956

    
957
DISAS_INSN(divl)
958
{
959
    TCGv num;
960
    TCGv den;
961
    TCGv reg;
962
    uint16_t ext;
963

    
964
    ext = lduw_code(s->pc);
965
    s->pc += 2;
966
    if (ext & 0x87f8) {
967
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
968
        return;
969
    }
970
    num = DREG(ext, 12);
971
    reg = DREG(ext, 0);
972
    tcg_gen_mov_i32(QREG_DIV1, num);
973
    SRC_EA(den, OS_LONG, 0, NULL);
974
    tcg_gen_mov_i32(QREG_DIV2, den);
975
    if (ext & 0x0800) {
976
        gen_helper_divs(cpu_env, tcg_const_i32(0));
977
    } else {
978
        gen_helper_divu(cpu_env, tcg_const_i32(0));
979
    }
980
    if ((ext & 7) == ((ext >> 12) & 7)) {
981
        /* div */
982
        tcg_gen_mov_i32 (reg, QREG_DIV1);
983
    } else {
984
        /* rem */
985
        tcg_gen_mov_i32 (reg, QREG_DIV2);
986
    }
987
    s->cc_op = CC_OP_FLAGS;
988
}
989

    
990
DISAS_INSN(addsub)
991
{
992
    TCGv reg;
993
    TCGv dest;
994
    TCGv src;
995
    TCGv tmp;
996
    TCGv addr;
997
    int add;
998

    
999
    add = (insn & 0x4000) != 0;
1000
    reg = DREG(insn, 9);
1001
    dest = gen_new_qreg(QMODE_I32);
1002
    if (insn & 0x100) {
1003
        SRC_EA(tmp, OS_LONG, 0, &addr);
1004
        src = reg;
1005
    } else {
1006
        tmp = reg;
1007
        SRC_EA(src, OS_LONG, 0, NULL);
1008
    }
1009
    if (add) {
1010
        tcg_gen_add_i32(dest, tmp, src);
1011
        gen_helper_xflag_lt(QREG_CC_X, dest, src);
1012
        s->cc_op = CC_OP_ADD;
1013
    } else {
1014
        gen_helper_xflag_lt(QREG_CC_X, tmp, src);
1015
        tcg_gen_sub_i32(dest, tmp, src);
1016
        s->cc_op = CC_OP_SUB;
1017
    }
1018
    gen_update_cc_add(dest, src);
1019
    if (insn & 0x100) {
1020
        DEST_EA(insn, OS_LONG, dest, &addr);
1021
    } else {
1022
        tcg_gen_mov_i32(reg, dest);
1023
    }
1024
}
1025

    
1026

    
1027
/* Reverse the order of the bits in REG.  */
1028
DISAS_INSN(bitrev)
1029
{
1030
    TCGv reg;
1031
    reg = DREG(insn, 0);
1032
    gen_helper_bitrev(reg, reg);
1033
}
1034

    
1035
DISAS_INSN(bitop_reg)
1036
{
1037
    int opsize;
1038
    int op;
1039
    TCGv src1;
1040
    TCGv src2;
1041
    TCGv tmp;
1042
    TCGv addr;
1043
    TCGv dest;
1044

    
1045
    if ((insn & 0x38) != 0)
1046
        opsize = OS_BYTE;
1047
    else
1048
        opsize = OS_LONG;
1049
    op = (insn >> 6) & 3;
1050
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1051
    src2 = DREG(insn, 9);
1052
    dest = gen_new_qreg(QMODE_I32);
1053

    
1054
    gen_flush_flags(s);
1055
    tmp = gen_new_qreg(QMODE_I32);
1056
    if (opsize == OS_BYTE)
1057
        tcg_gen_andi_i32(tmp, src2, 7);
1058
    else
1059
        tcg_gen_andi_i32(tmp, src2, 31);
1060
    src2 = tmp;
1061
    tmp = gen_new_qreg(QMODE_I32);
1062
    tcg_gen_shr_i32(tmp, src1, src2);
1063
    tcg_gen_andi_i32(tmp, tmp, 1);
1064
    tcg_gen_shli_i32(tmp, tmp, 2);
1065
    /* Clear CCF_Z if bit set.  */
1066
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1067
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1068

    
1069
    tcg_gen_shl_i32(tmp, tcg_const_i32(1), src2);
1070
    switch (op) {
1071
    case 1: /* bchg */
1072
        tcg_gen_xor_i32(dest, src1, tmp);
1073
        break;
1074
    case 2: /* bclr */
1075
        tcg_gen_not_i32(tmp, tmp);
1076
        tcg_gen_and_i32(dest, src1, tmp);
1077
        break;
1078
    case 3: /* bset */
1079
        tcg_gen_or_i32(dest, src1, tmp);
1080
        break;
1081
    default: /* btst */
1082
        break;
1083
    }
1084
    if (op)
1085
        DEST_EA(insn, opsize, dest, &addr);
1086
}
1087

    
1088
DISAS_INSN(sats)
1089
{
1090
    TCGv reg;
1091
    reg = DREG(insn, 0);
1092
    gen_flush_flags(s);
1093
    gen_helper_sats(reg, reg, QREG_CC_DEST);
1094
    gen_logic_cc(s, reg);
1095
}
1096

    
1097
static void gen_push(DisasContext *s, TCGv val)
1098
{
1099
    TCGv tmp;
1100

    
1101
    tmp = gen_new_qreg(QMODE_I32);
1102
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1103
    gen_store(s, OS_LONG, tmp, val);
1104
    tcg_gen_mov_i32(QREG_SP, tmp);
1105
}
1106

    
1107
DISAS_INSN(movem)
1108
{
1109
    TCGv addr;
1110
    int i;
1111
    uint16_t mask;
1112
    TCGv reg;
1113
    TCGv tmp;
1114
    int is_load;
1115

    
1116
    mask = lduw_code(s->pc);
1117
    s->pc += 2;
1118
    tmp = gen_lea(s, insn, OS_LONG);
1119
    if (IS_NULL_QREG(tmp)) {
1120
        gen_addr_fault(s);
1121
        return;
1122
    }
1123
    addr = gen_new_qreg(QMODE_I32);
1124
    tcg_gen_mov_i32(addr, tmp);
1125
    is_load = ((insn & 0x0400) != 0);
1126
    for (i = 0; i < 16; i++, mask >>= 1) {
1127
        if (mask & 1) {
1128
            if (i < 8)
1129
                reg = DREG(i, 0);
1130
            else
1131
                reg = AREG(i, 0);
1132
            if (is_load) {
1133
                tmp = gen_load(s, OS_LONG, addr, 0);
1134
                tcg_gen_mov_i32(reg, tmp);
1135
            } else {
1136
                gen_store(s, OS_LONG, addr, reg);
1137
            }
1138
            if (mask != 1)
1139
                tcg_gen_addi_i32(addr, addr, 4);
1140
        }
1141
    }
1142
}
1143

    
1144
DISAS_INSN(bitop_im)
1145
{
1146
    int opsize;
1147
    int op;
1148
    TCGv src1;
1149
    uint32_t mask;
1150
    int bitnum;
1151
    TCGv tmp;
1152
    TCGv addr;
1153

    
1154
    if ((insn & 0x38) != 0)
1155
        opsize = OS_BYTE;
1156
    else
1157
        opsize = OS_LONG;
1158
    op = (insn >> 6) & 3;
1159

    
1160
    bitnum = lduw_code(s->pc);
1161
    s->pc += 2;
1162
    if (bitnum & 0xff00) {
1163
        disas_undef(s, insn);
1164
        return;
1165
    }
1166

    
1167
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1168

    
1169
    gen_flush_flags(s);
1170
    if (opsize == OS_BYTE)
1171
        bitnum &= 7;
1172
    else
1173
        bitnum &= 31;
1174
    mask = 1 << bitnum;
1175

    
1176
    tmp = gen_new_qreg(QMODE_I32);
1177
    assert (CCF_Z == (1 << 2));
1178
    if (bitnum > 2)
1179
        tcg_gen_shri_i32(tmp, src1, bitnum - 2);
1180
    else if (bitnum < 2)
1181
        tcg_gen_shli_i32(tmp, src1, 2 - bitnum);
1182
    else
1183
        tcg_gen_mov_i32(tmp, src1);
1184
    tcg_gen_andi_i32(tmp, tmp, CCF_Z);
1185
    /* Clear CCF_Z if bit set.  */
1186
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1187
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1188
    if (op) {
1189
        switch (op) {
1190
        case 1: /* bchg */
1191
            tcg_gen_xori_i32(tmp, src1, mask);
1192
            break;
1193
        case 2: /* bclr */
1194
            tcg_gen_andi_i32(tmp, src1, ~mask);
1195
            break;
1196
        case 3: /* bset */
1197
            tcg_gen_ori_i32(tmp, src1, mask);
1198
            break;
1199
        default: /* btst */
1200
            break;
1201
        }
1202
        DEST_EA(insn, opsize, tmp, &addr);
1203
    }
1204
}
1205

    
1206
DISAS_INSN(arith_im)
1207
{
1208
    int op;
1209
    uint32_t im;
1210
    TCGv src1;
1211
    TCGv dest;
1212
    TCGv addr;
1213

    
1214
    op = (insn >> 9) & 7;
1215
    SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
1216
    im = read_im32(s);
1217
    dest = gen_new_qreg(QMODE_I32);
1218
    switch (op) {
1219
    case 0: /* ori */
1220
        tcg_gen_ori_i32(dest, src1, im);
1221
        gen_logic_cc(s, dest);
1222
        break;
1223
    case 1: /* andi */
1224
        tcg_gen_andi_i32(dest, src1, im);
1225
        gen_logic_cc(s, dest);
1226
        break;
1227
    case 2: /* subi */
1228
        tcg_gen_mov_i32(dest, src1);
1229
        gen_helper_xflag_lt(QREG_CC_X, dest, gen_im32(im));
1230
        tcg_gen_subi_i32(dest, dest, im);
1231
        gen_update_cc_add(dest, gen_im32(im));
1232
        s->cc_op = CC_OP_SUB;
1233
        break;
1234
    case 3: /* addi */
1235
        tcg_gen_mov_i32(dest, src1);
1236
        tcg_gen_addi_i32(dest, dest, im);
1237
        gen_update_cc_add(dest, gen_im32(im));
1238
        gen_helper_xflag_lt(QREG_CC_X, dest, gen_im32(im));
1239
        s->cc_op = CC_OP_ADD;
1240
        break;
1241
    case 5: /* eori */
1242
        tcg_gen_xori_i32(dest, src1, im);
1243
        gen_logic_cc(s, dest);
1244
        break;
1245
    case 6: /* cmpi */
1246
        tcg_gen_mov_i32(dest, src1);
1247
        tcg_gen_subi_i32(dest, dest, im);
1248
        gen_update_cc_add(dest, gen_im32(im));
1249
        s->cc_op = CC_OP_SUB;
1250
        break;
1251
    default:
1252
        abort();
1253
    }
1254
    if (op != 6) {
1255
        DEST_EA(insn, OS_LONG, dest, &addr);
1256
    }
1257
}
1258

    
1259
DISAS_INSN(byterev)
1260
{
1261
    TCGv reg;
1262

    
1263
    reg = DREG(insn, 0);
1264
    tcg_gen_bswap_i32(reg, reg);
1265
}
1266

    
1267
DISAS_INSN(move)
1268
{
1269
    TCGv src;
1270
    TCGv dest;
1271
    int op;
1272
    int opsize;
1273

    
1274
    switch (insn >> 12) {
1275
    case 1: /* move.b */
1276
        opsize = OS_BYTE;
1277
        break;
1278
    case 2: /* move.l */
1279
        opsize = OS_LONG;
1280
        break;
1281
    case 3: /* move.w */
1282
        opsize = OS_WORD;
1283
        break;
1284
    default:
1285
        abort();
1286
    }
1287
    SRC_EA(src, opsize, 1, NULL);
1288
    op = (insn >> 6) & 7;
1289
    if (op == 1) {
1290
        /* movea */
1291
        /* The value will already have been sign extended.  */
1292
        dest = AREG(insn, 9);
1293
        tcg_gen_mov_i32(dest, src);
1294
    } else {
1295
        /* normal move */
1296
        uint16_t dest_ea;
1297
        dest_ea = ((insn >> 9) & 7) | (op << 3);
1298
        DEST_EA(dest_ea, opsize, src, NULL);
1299
        /* This will be correct because loads sign extend.  */
1300
        gen_logic_cc(s, src);
1301
    }
1302
}
1303

    
1304
DISAS_INSN(negx)
1305
{
1306
    TCGv reg;
1307

    
1308
    gen_flush_flags(s);
1309
    reg = DREG(insn, 0);
1310
    gen_helper_subx_cc(reg, cpu_env, tcg_const_i32(0), reg);
1311
}
1312

    
1313
DISAS_INSN(lea)
1314
{
1315
    TCGv reg;
1316
    TCGv tmp;
1317

    
1318
    reg = AREG(insn, 9);
1319
    tmp = gen_lea(s, insn, OS_LONG);
1320
    if (IS_NULL_QREG(tmp)) {
1321
        gen_addr_fault(s);
1322
        return;
1323
    }
1324
    tcg_gen_mov_i32(reg, tmp);
1325
}
1326

    
1327
DISAS_INSN(clr)
1328
{
1329
    int opsize;
1330

    
1331
    switch ((insn >> 6) & 3) {
1332
    case 0: /* clr.b */
1333
        opsize = OS_BYTE;
1334
        break;
1335
    case 1: /* clr.w */
1336
        opsize = OS_WORD;
1337
        break;
1338
    case 2: /* clr.l */
1339
        opsize = OS_LONG;
1340
        break;
1341
    default:
1342
        abort();
1343
    }
1344
    DEST_EA(insn, opsize, gen_im32(0), NULL);
1345
    gen_logic_cc(s, gen_im32(0));
1346
}
1347

    
1348
static TCGv gen_get_ccr(DisasContext *s)
1349
{
1350
    TCGv dest;
1351

    
1352
    gen_flush_flags(s);
1353
    dest = gen_new_qreg(QMODE_I32);
1354
    tcg_gen_shli_i32(dest, QREG_CC_X, 4);
1355
    tcg_gen_or_i32(dest, dest, QREG_CC_DEST);
1356
    return dest;
1357
}
1358

    
1359
DISAS_INSN(move_from_ccr)
1360
{
1361
    TCGv reg;
1362
    TCGv ccr;
1363

    
1364
    ccr = gen_get_ccr(s);
1365
    reg = DREG(insn, 0);
1366
    gen_partset_reg(OS_WORD, reg, ccr);
1367
}
1368

    
1369
DISAS_INSN(neg)
1370
{
1371
    TCGv reg;
1372
    TCGv src1;
1373

    
1374
    reg = DREG(insn, 0);
1375
    src1 = gen_new_qreg(QMODE_I32);
1376
    tcg_gen_mov_i32(src1, reg);
1377
    tcg_gen_neg_i32(reg, src1);
1378
    s->cc_op = CC_OP_SUB;
1379
    gen_update_cc_add(reg, src1);
1380
    gen_helper_xflag_lt(QREG_CC_X, tcg_const_i32(0), src1);
1381
    s->cc_op = CC_OP_SUB;
1382
}
1383

    
1384
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
1385
{
1386
    tcg_gen_movi_i32(QREG_CC_DEST, val & 0xf);
1387
    tcg_gen_movi_i32(QREG_CC_X, (val & 0x10) >> 4);
1388
    if (!ccr_only) {
1389
        gen_helper_set_sr(cpu_env, tcg_const_i32(val & 0xff00));
1390
    }
1391
}
1392

    
1393
static void gen_set_sr(DisasContext *s, uint16_t insn, int ccr_only)
1394
{
1395
    TCGv tmp;
1396
    TCGv reg;
1397

    
1398
    s->cc_op = CC_OP_FLAGS;
1399
    if ((insn & 0x38) == 0)
1400
      {
1401
        tmp = gen_new_qreg(QMODE_I32);
1402
        reg = DREG(insn, 0);
1403
        tcg_gen_andi_i32(QREG_CC_DEST, reg, 0xf);
1404
        tcg_gen_shri_i32(tmp, reg, 4);
1405
        tcg_gen_andi_i32(QREG_CC_X, tmp, 1);
1406
        if (!ccr_only) {
1407
            gen_helper_set_sr(cpu_env, reg);
1408
        }
1409
      }
1410
    else if ((insn & 0x3f) == 0x3c)
1411
      {
1412
        uint16_t val;
1413
        val = lduw_code(s->pc);
1414
        s->pc += 2;
1415
        gen_set_sr_im(s, val, ccr_only);
1416
      }
1417
    else
1418
        disas_undef(s, insn);
1419
}
1420

    
1421
DISAS_INSN(move_to_ccr)
1422
{
1423
    gen_set_sr(s, insn, 1);
1424
}
1425

    
1426
DISAS_INSN(not)
1427
{
1428
    TCGv reg;
1429

    
1430
    reg = DREG(insn, 0);
1431
    tcg_gen_not_i32(reg, reg);
1432
    gen_logic_cc(s, reg);
1433
}
1434

    
1435
DISAS_INSN(swap)
1436
{
1437
    TCGv src1;
1438
    TCGv src2;
1439
    TCGv reg;
1440

    
1441
    src1 = gen_new_qreg(QMODE_I32);
1442
    src2 = gen_new_qreg(QMODE_I32);
1443
    reg = DREG(insn, 0);
1444
    tcg_gen_shli_i32(src1, reg, 16);
1445
    tcg_gen_shri_i32(src2, reg, 16);
1446
    tcg_gen_or_i32(reg, src1, src2);
1447
    gen_logic_cc(s, reg);
1448
}
1449

    
1450
DISAS_INSN(pea)
1451
{
1452
    TCGv tmp;
1453

    
1454
    tmp = gen_lea(s, insn, OS_LONG);
1455
    if (IS_NULL_QREG(tmp)) {
1456
        gen_addr_fault(s);
1457
        return;
1458
    }
1459
    gen_push(s, tmp);
1460
}
1461

    
1462
DISAS_INSN(ext)
1463
{
1464
    int op;
1465
    TCGv reg;
1466
    TCGv tmp;
1467

    
1468
    reg = DREG(insn, 0);
1469
    op = (insn >> 6) & 7;
1470
    tmp = gen_new_qreg(QMODE_I32);
1471
    if (op == 3)
1472
        tcg_gen_ext16s_i32(tmp, reg);
1473
    else
1474
        tcg_gen_ext8s_i32(tmp, reg);
1475
    if (op == 2)
1476
        gen_partset_reg(OS_WORD, reg, tmp);
1477
    else
1478
        tcg_gen_mov_i32(reg, tmp);
1479
    gen_logic_cc(s, tmp);
1480
}
1481

    
1482
DISAS_INSN(tst)
1483
{
1484
    int opsize;
1485
    TCGv tmp;
1486

    
1487
    switch ((insn >> 6) & 3) {
1488
    case 0: /* tst.b */
1489
        opsize = OS_BYTE;
1490
        break;
1491
    case 1: /* tst.w */
1492
        opsize = OS_WORD;
1493
        break;
1494
    case 2: /* tst.l */
1495
        opsize = OS_LONG;
1496
        break;
1497
    default:
1498
        abort();
1499
    }
1500
    SRC_EA(tmp, opsize, 1, NULL);
1501
    gen_logic_cc(s, tmp);
1502
}
1503

    
1504
DISAS_INSN(pulse)
1505
{
1506
  /* Implemented as a NOP.  */
1507
}
1508

    
1509
DISAS_INSN(illegal)
1510
{
1511
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1512
}
1513

    
1514
/* ??? This should be atomic.  */
1515
DISAS_INSN(tas)
1516
{
1517
    TCGv dest;
1518
    TCGv src1;
1519
    TCGv addr;
1520

    
1521
    dest = gen_new_qreg(QMODE_I32);
1522
    SRC_EA(src1, OS_BYTE, 1, &addr);
1523
    gen_logic_cc(s, src1);
1524
    tcg_gen_ori_i32(dest, src1, 0x80);
1525
    DEST_EA(insn, OS_BYTE, dest, &addr);
1526
}
1527

    
1528
DISAS_INSN(mull)
1529
{
1530
    uint16_t ext;
1531
    TCGv reg;
1532
    TCGv src1;
1533
    TCGv dest;
1534

    
1535
    /* The upper 32 bits of the product are discarded, so
1536
       muls.l and mulu.l are functionally equivalent.  */
1537
    ext = lduw_code(s->pc);
1538
    s->pc += 2;
1539
    if (ext & 0x87ff) {
1540
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1541
        return;
1542
    }
1543
    reg = DREG(ext, 12);
1544
    SRC_EA(src1, OS_LONG, 0, NULL);
1545
    dest = gen_new_qreg(QMODE_I32);
1546
    tcg_gen_mul_i32(dest, src1, reg);
1547
    tcg_gen_mov_i32(reg, dest);
1548
    /* Unlike m68k, coldfire always clears the overflow bit.  */
1549
    gen_logic_cc(s, dest);
1550
}
1551

    
1552
DISAS_INSN(link)
1553
{
1554
    int16_t offset;
1555
    TCGv reg;
1556
    TCGv tmp;
1557

    
1558
    offset = ldsw_code(s->pc);
1559
    s->pc += 2;
1560
    reg = AREG(insn, 0);
1561
    tmp = gen_new_qreg(QMODE_I32);
1562
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1563
    gen_store(s, OS_LONG, tmp, reg);
1564
    if ((insn & 7) != 7)
1565
        tcg_gen_mov_i32(reg, tmp);
1566
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
1567
}
1568

    
1569
DISAS_INSN(unlk)
1570
{
1571
    TCGv src;
1572
    TCGv reg;
1573
    TCGv tmp;
1574

    
1575
    src = gen_new_qreg(QMODE_I32);
1576
    reg = AREG(insn, 0);
1577
    tcg_gen_mov_i32(src, reg);
1578
    tmp = gen_load(s, OS_LONG, src, 0);
1579
    tcg_gen_mov_i32(reg, tmp);
1580
    tcg_gen_addi_i32(QREG_SP, src, 4);
1581
}
1582

    
1583
DISAS_INSN(nop)
1584
{
1585
}
1586

    
1587
DISAS_INSN(rts)
1588
{
1589
    TCGv tmp;
1590

    
1591
    tmp = gen_load(s, OS_LONG, QREG_SP, 0);
1592
    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
1593
    gen_jmp(s, tmp);
1594
}
1595

    
1596
DISAS_INSN(jump)
1597
{
1598
    TCGv tmp;
1599

    
1600
    /* Load the target address first to ensure correct exception
1601
       behavior.  */
1602
    tmp = gen_lea(s, insn, OS_LONG);
1603
    if (IS_NULL_QREG(tmp)) {
1604
        gen_addr_fault(s);
1605
        return;
1606
    }
1607
    if ((insn & 0x40) == 0) {
1608
        /* jsr */
1609
        gen_push(s, gen_im32(s->pc));
1610
    }
1611
    gen_jmp(s, tmp);
1612
}
1613

    
1614
DISAS_INSN(addsubq)
1615
{
1616
    TCGv src1;
1617
    TCGv src2;
1618
    TCGv dest;
1619
    int val;
1620
    TCGv addr;
1621

    
1622
    SRC_EA(src1, OS_LONG, 0, &addr);
1623
    val = (insn >> 9) & 7;
1624
    if (val == 0)
1625
        val = 8;
1626
    dest = gen_new_qreg(QMODE_I32);
1627
    tcg_gen_mov_i32(dest, src1);
1628
    if ((insn & 0x38) == 0x08) {
1629
        /* Don't update condition codes if the destination is an
1630
           address register.  */
1631
        if (insn & 0x0100) {
1632
            tcg_gen_subi_i32(dest, dest, val);
1633
        } else {
1634
            tcg_gen_addi_i32(dest, dest, val);
1635
        }
1636
    } else {
1637
        src2 = gen_im32(val);
1638
        if (insn & 0x0100) {
1639
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1640
            tcg_gen_subi_i32(dest, dest, val);
1641
            s->cc_op = CC_OP_SUB;
1642
        } else {
1643
            tcg_gen_addi_i32(dest, dest, val);
1644
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1645
            s->cc_op = CC_OP_ADD;
1646
        }
1647
        gen_update_cc_add(dest, src2);
1648
    }
1649
    DEST_EA(insn, OS_LONG, dest, &addr);
1650
}
1651

    
1652
DISAS_INSN(tpf)
1653
{
1654
    switch (insn & 7) {
1655
    case 2: /* One extension word.  */
1656
        s->pc += 2;
1657
        break;
1658
    case 3: /* Two extension words.  */
1659
        s->pc += 4;
1660
        break;
1661
    case 4: /* No extension words.  */
1662
        break;
1663
    default:
1664
        disas_undef(s, insn);
1665
    }
1666
}
1667

    
1668
DISAS_INSN(branch)
1669
{
1670
    int32_t offset;
1671
    uint32_t base;
1672
    int op;
1673
    int l1;
1674

    
1675
    base = s->pc;
1676
    op = (insn >> 8) & 0xf;
1677
    offset = (int8_t)insn;
1678
    if (offset == 0) {
1679
        offset = ldsw_code(s->pc);
1680
        s->pc += 2;
1681
    } else if (offset == -1) {
1682
        offset = read_im32(s);
1683
    }
1684
    if (op == 1) {
1685
        /* bsr */
1686
        gen_push(s, gen_im32(s->pc));
1687
    }
1688
    gen_flush_cc_op(s);
1689
    if (op > 1) {
1690
        /* Bcc */
1691
        l1 = gen_new_label();
1692
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1693
        gen_jmp_tb(s, 1, base + offset);
1694
        gen_set_label(l1);
1695
        gen_jmp_tb(s, 0, s->pc);
1696
    } else {
1697
        /* Unconditional branch.  */
1698
        gen_jmp_tb(s, 0, base + offset);
1699
    }
1700
}
1701

    
1702
DISAS_INSN(moveq)
1703
{
1704
    uint32_t val;
1705

    
1706
    val = (int8_t)insn;
1707
    tcg_gen_movi_i32(DREG(insn, 9), val);
1708
    gen_logic_cc(s, tcg_const_i32(val));
1709
}
1710

    
1711
DISAS_INSN(mvzs)
1712
{
1713
    int opsize;
1714
    TCGv src;
1715
    TCGv reg;
1716

    
1717
    if (insn & 0x40)
1718
        opsize = OS_WORD;
1719
    else
1720
        opsize = OS_BYTE;
1721
    SRC_EA(src, opsize, (insn & 0x80) == 0, NULL);
1722
    reg = DREG(insn, 9);
1723
    tcg_gen_mov_i32(reg, src);
1724
    gen_logic_cc(s, src);
1725
}
1726

    
1727
DISAS_INSN(or)
1728
{
1729
    TCGv reg;
1730
    TCGv dest;
1731
    TCGv src;
1732
    TCGv addr;
1733

    
1734
    reg = DREG(insn, 9);
1735
    dest = gen_new_qreg(QMODE_I32);
1736
    if (insn & 0x100) {
1737
        SRC_EA(src, OS_LONG, 0, &addr);
1738
        tcg_gen_or_i32(dest, src, reg);
1739
        DEST_EA(insn, OS_LONG, dest, &addr);
1740
    } else {
1741
        SRC_EA(src, OS_LONG, 0, NULL);
1742
        tcg_gen_or_i32(dest, src, reg);
1743
        tcg_gen_mov_i32(reg, dest);
1744
    }
1745
    gen_logic_cc(s, dest);
1746
}
1747

    
1748
DISAS_INSN(suba)
1749
{
1750
    TCGv src;
1751
    TCGv reg;
1752

    
1753
    SRC_EA(src, OS_LONG, 0, NULL);
1754
    reg = AREG(insn, 9);
1755
    tcg_gen_sub_i32(reg, reg, src);
1756
}
1757

    
1758
DISAS_INSN(subx)
1759
{
1760
    TCGv reg;
1761
    TCGv src;
1762

    
1763
    gen_flush_flags(s);
1764
    reg = DREG(insn, 9);
1765
    src = DREG(insn, 0);
1766
    gen_helper_subx_cc(reg, cpu_env, reg, src);
1767
}
1768

    
1769
DISAS_INSN(mov3q)
1770
{
1771
    TCGv src;
1772
    int val;
1773

    
1774
    val = (insn >> 9) & 7;
1775
    if (val == 0)
1776
        val = -1;
1777
    src = gen_im32(val);
1778
    gen_logic_cc(s, src);
1779
    DEST_EA(insn, OS_LONG, src, NULL);
1780
}
1781

    
1782
DISAS_INSN(cmp)
1783
{
1784
    int op;
1785
    TCGv src;
1786
    TCGv reg;
1787
    TCGv dest;
1788
    int opsize;
1789

    
1790
    op = (insn >> 6) & 3;
1791
    switch (op) {
1792
    case 0: /* cmp.b */
1793
        opsize = OS_BYTE;
1794
        s->cc_op = CC_OP_CMPB;
1795
        break;
1796
    case 1: /* cmp.w */
1797
        opsize = OS_WORD;
1798
        s->cc_op = CC_OP_CMPW;
1799
        break;
1800
    case 2: /* cmp.l */
1801
        opsize = OS_LONG;
1802
        s->cc_op = CC_OP_SUB;
1803
        break;
1804
    default:
1805
        abort();
1806
    }
1807
    SRC_EA(src, opsize, 1, NULL);
1808
    reg = DREG(insn, 9);
1809
    dest = gen_new_qreg(QMODE_I32);
1810
    tcg_gen_sub_i32(dest, reg, src);
1811
    gen_update_cc_add(dest, src);
1812
}
1813

    
1814
DISAS_INSN(cmpa)
1815
{
1816
    int opsize;
1817
    TCGv src;
1818
    TCGv reg;
1819
    TCGv dest;
1820

    
1821
    if (insn & 0x100) {
1822
        opsize = OS_LONG;
1823
    } else {
1824
        opsize = OS_WORD;
1825
    }
1826
    SRC_EA(src, opsize, 1, NULL);
1827
    reg = AREG(insn, 9);
1828
    dest = gen_new_qreg(QMODE_I32);
1829
    tcg_gen_sub_i32(dest, reg, src);
1830
    gen_update_cc_add(dest, src);
1831
    s->cc_op = CC_OP_SUB;
1832
}
1833

    
1834
DISAS_INSN(eor)
1835
{
1836
    TCGv src;
1837
    TCGv reg;
1838
    TCGv dest;
1839
    TCGv addr;
1840

    
1841
    SRC_EA(src, OS_LONG, 0, &addr);
1842
    reg = DREG(insn, 9);
1843
    dest = gen_new_qreg(QMODE_I32);
1844
    tcg_gen_xor_i32(dest, src, reg);
1845
    gen_logic_cc(s, dest);
1846
    DEST_EA(insn, OS_LONG, dest, &addr);
1847
}
1848

    
1849
DISAS_INSN(and)
1850
{
1851
    TCGv src;
1852
    TCGv reg;
1853
    TCGv dest;
1854
    TCGv addr;
1855

    
1856
    reg = DREG(insn, 9);
1857
    dest = gen_new_qreg(QMODE_I32);
1858
    if (insn & 0x100) {
1859
        SRC_EA(src, OS_LONG, 0, &addr);
1860
        tcg_gen_and_i32(dest, src, reg);
1861
        DEST_EA(insn, OS_LONG, dest, &addr);
1862
    } else {
1863
        SRC_EA(src, OS_LONG, 0, NULL);
1864
        tcg_gen_and_i32(dest, src, reg);
1865
        tcg_gen_mov_i32(reg, dest);
1866
    }
1867
    gen_logic_cc(s, dest);
1868
}
1869

    
1870
DISAS_INSN(adda)
1871
{
1872
    TCGv src;
1873
    TCGv reg;
1874

    
1875
    SRC_EA(src, OS_LONG, 0, NULL);
1876
    reg = AREG(insn, 9);
1877
    tcg_gen_add_i32(reg, reg, src);
1878
}
1879

    
1880
DISAS_INSN(addx)
1881
{
1882
    TCGv reg;
1883
    TCGv src;
1884

    
1885
    gen_flush_flags(s);
1886
    reg = DREG(insn, 9);
1887
    src = DREG(insn, 0);
1888
    gen_helper_addx_cc(reg, cpu_env, reg, src);
1889
    s->cc_op = CC_OP_FLAGS;
1890
}
1891

    
1892
/* TODO: This could be implemented without helper functions.  */
DISAS_INSN(shift_im)
{
    TCGv reg;
    int tmp;
    TCGv shift;

    reg = DREG(insn, 0);
    tmp = (insn >> 9) & 7;
    if (tmp == 0)
        tmp = 8;
    shift = gen_im32(tmp);
    /* No need to flush flags because we know we will set C flag.  */
    if (insn & 0x100) {
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
    } else {
        if (insn & 8) {
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
        } else {
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
        }
    }
    s->cc_op = CC_OP_SHIFT;
}

DISAS_INSN(shift_reg)
{
    TCGv reg;
    TCGv shift;

    reg = DREG(insn, 0);
    shift = DREG(insn, 9);
    /* Shift by zero leaves C flag unmodified.  */
    gen_flush_flags(s);
    if (insn & 0x100) {
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
    } else {
        if (insn & 8) {
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
        } else {
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
        }
    }
    s->cc_op = CC_OP_SHIFT;
}

DISAS_INSN(ff1)
{
    TCGv reg;
    reg = DREG(insn, 0);
    gen_logic_cc(s, reg);
    gen_helper_ff1(reg, reg);
}

static TCGv gen_get_sr(DisasContext *s)
{
    TCGv ccr;
    TCGv sr;

    ccr = gen_get_ccr(s);
    sr = gen_new_qreg(QMODE_I32);
    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
    tcg_gen_or_i32(sr, sr, ccr);
    return sr;
}

DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    addr = s->pc - 2;
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (ext != 0x46FC) {
        gen_exception(s, addr, EXCP_UNSUPPORTED);
        return;
    }
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
}

DISAS_INSN(move_from_sr)
{
    TCGv reg;
    TCGv sr;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    sr = gen_get_sr(s);
    reg = DREG(insn, 0);
    gen_partset_reg(OS_WORD, reg, sr);
}

DISAS_INSN(move_to_sr)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_set_sr(s, insn, 0);
    gen_lookup_tb(s);
}

DISAS_INSN(move_from_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(move_to_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(halt)
{
    gen_exception(s, s->pc, EXCP_HALT_INSN);
}

DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(QREG_HALTED, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}

DISAS_INSN(rte)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_exception(s, s->pc - 2, EXCP_RTE);
}

DISAS_INSN(movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    gen_lookup_tb(s);
}

DISAS_INSN(intouch)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* ICache fetch.  Implement as no-op.  */
}

DISAS_INSN(cpushl)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}

DISAS_INSN(wddata)
{
    gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
}

DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    qemu_assert(0, "WDEBUG not implemented");
}

DISAS_INSN(trap)
{
    gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
}

/* ??? FP exceptions are not implemented.  Most exceptions are deferred until
   immediately before the next FP instruction is executed.  */
DISAS_INSN(fpu)
{
    uint16_t ext;
    int opmode;
    TCGv src;
    TCGv dest;
    TCGv res;
    int round;
    int opsize;

    ext = lduw_code(s->pc);
    s->pc += 2;
    opmode = ext & 0x7f;
    switch ((ext >> 13) & 7) {
    case 0: case 2:
        break;
    case 1:
        goto undef;
    case 3: /* fmove out */
        src = FREG(ext, 7);
        /* fmove */
        /* ??? TODO: Proper behavior on overflow.  */
        switch ((ext >> 10) & 7) {
        case 0:
            opsize = OS_LONG;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        case 1:
            opsize = OS_SINGLE;
            res = gen_new_qreg(QMODE_F32);
            gen_helper_f64_to_f32(res, cpu_env, src);
            break;
        case 4:
            opsize = OS_WORD;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        case 5:
            opsize = OS_DOUBLE;
            res = src;
            break;
        case 6:
            opsize = OS_BYTE;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        default:
            goto undef;
        }
        DEST_EA(insn, opsize, res, NULL);
        return;
    case 4: /* fmove to control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Ignore writes.  */
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove to control %d",
                      (ext >> 10) & 7);
        }
        break;
    case 5: /* fmove from control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Always return zero.  */
            res = gen_im32(0);
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove from control %d",
                      (ext >> 10) & 7);
            goto undef;
        }
        DEST_EA(insn, OS_LONG, res, NULL);
        break;
    case 6: /* fmovem */
    case 7:
        {
            TCGv addr;
            uint16_t mask;
            int i;
            if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
                goto undef;
            src = gen_lea(s, insn, OS_LONG);
            if (IS_NULL_QREG(src)) {
                gen_addr_fault(s);
                return;
            }
            addr = gen_new_qreg(QMODE_I32);
            tcg_gen_mov_i32(addr, src);
            mask = 0x80;
            for (i = 0; i < 8; i++) {
                if (ext & mask) {
                    s->is_mem = 1;
                    dest = FREG(i, 0);
                    if (ext & (1 << 13)) {
                        /* store */
                        tcg_gen_qemu_stf64(dest, addr, IS_USER(s));
                    } else {
                        /* load */
                        tcg_gen_qemu_ldf64(dest, addr, IS_USER(s));
                    }
                    if (ext & (mask - 1))
                        tcg_gen_addi_i32(addr, addr, 8);
                }
                mask >>= 1;
            }
        }
        return;
    }
    if (ext & (1 << 14)) {
        TCGv tmp;

        /* Source effective address.  */
        switch ((ext >> 10) & 7) {
        case 0: opsize = OS_LONG; break;
        case 1: opsize = OS_SINGLE; break;
        case 4: opsize = OS_WORD; break;
        case 5: opsize = OS_DOUBLE; break;
        case 6: opsize = OS_BYTE; break;
        default:
            goto undef;
        }
        SRC_EA(tmp, opsize, 1, NULL);
        if (opsize == OS_DOUBLE) {
            src = tmp;
        } else {
            src = gen_new_qreg(QMODE_F64);
            switch (opsize) {
            case OS_LONG:
            case OS_WORD:
            case OS_BYTE:
                gen_helper_i32_to_f64(src, cpu_env, tmp);
                break;
            case OS_SINGLE:
                gen_helper_f32_to_f64(src, cpu_env, tmp);
                break;
            }
        }
    } else {
        /* Source register.  */
        src = FREG(ext, 10);
    }
    dest = FREG(ext, 7);
    res = gen_new_qreg(QMODE_F64);
    if (opmode != 0x3a)
        tcg_gen_mov_f64(res, dest);
    round = 1;
    switch (opmode) {
    case 0: case 0x40: case 0x44: /* fmove */
        tcg_gen_mov_f64(res, src);
        break;
    case 1: /* fint */
        gen_helper_iround_f64(res, cpu_env, src);
        round = 0;
        break;
    case 3: /* fintrz */
        gen_helper_itrunc_f64(res, cpu_env, src);
        round = 0;
        break;
    case 4: case 0x41: case 0x45: /* fsqrt */
        gen_helper_sqrt_f64(res, cpu_env, src);
        break;
    case 0x18: case 0x58: case 0x5c: /* fabs */
        gen_helper_abs_f64(res, src);
        break;
    case 0x1a: case 0x5a: case 0x5e: /* fneg */
        gen_helper_chs_f64(res, src);
        break;
    case 0x20: case 0x60: case 0x64: /* fdiv */
        gen_helper_div_f64(res, cpu_env, res, src);
        break;
    case 0x22: case 0x62: case 0x66: /* fadd */
        gen_helper_add_f64(res, cpu_env, res, src);
        break;
    case 0x23: case 0x63: case 0x67: /* fmul */
        gen_helper_mul_f64(res, cpu_env, res, src);
        break;
    case 0x28: case 0x68: case 0x6c: /* fsub */
        gen_helper_sub_f64(res, cpu_env, res, src);
        break;
    case 0x38: /* fcmp */
        gen_helper_sub_cmp_f64(res, cpu_env, res, src);
        dest = NULL_QREG;
        round = 0;
        break;
    case 0x3a: /* ftst */
        tcg_gen_mov_f64(res, src);
        dest = NULL_QREG;
        round = 0;
        break;
    default:
        goto undef;
    }
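    /* Decide whether the result must be rounded to single precision:
       the opmodes with bit 6 set and bit 2 clear always round, those
       with bit 2 set never do, and the remaining opmodes follow the
       FPCR precision setting.  */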
    if (round) {
        if (opmode & 0x40) {
            if ((opmode & 0x4) != 0)
                round = 0;
        } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
            round = 0;
        }
    }
    if (round) {
        TCGv tmp;

        tmp = gen_new_qreg(QMODE_F32);
        gen_helper_f64_to_f32(tmp, cpu_env, res);
        gen_helper_f32_to_f64(res, cpu_env, tmp);
    }
    tcg_gen_mov_f64(QREG_FP_RESULT, res);
    if (!IS_NULL_QREG(dest)) {
        tcg_gen_mov_f64(dest, res);
    }
    return;
undef:
    s->pc -= 2;
    disas_undef_fpu(s, insn);
}

DISAS_INSN(fbcc)
{
    uint32_t offset;
    uint32_t addr;
    TCGv flag;
    int l1;

    addr = s->pc;
    offset = ldsw_code(s->pc);
    s->pc += 2;
    if (insn & (1 << 6)) {
        offset = (offset << 16) | lduw_code(s->pc);
        s->pc += 2;
    }

    l1 = gen_new_label();
    /* TODO: Raise BSUN exception.  */
    flag = gen_new_qreg(QMODE_I32);
    gen_helper_compare_f64(flag, cpu_env, QREG_FP_RESULT);
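    /* The comparison result in 'flag' is 0 for equal, 1 for greater,
       -1 for less and 2 for unordered, as noted per condition below.  */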
    /* Jump to l1 if condition is true.  */
    switch (insn & 0xf) {
    case 0: /* f */
        break;
    case 1: /* eq (=0) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 2: /* ogt (=1) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(1), l1);
        break;
    case 3: /* oge (=0 or =1) */
        tcg_gen_brcond_i32(TCG_COND_LEU, flag, tcg_const_i32(1), l1);
        break;
    case 4: /* olt (=-1) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(0), l1);
        break;
    case 5: /* ole (=-1 or =0) */
        tcg_gen_brcond_i32(TCG_COND_LE, flag, tcg_const_i32(0), l1);
        break;
    case 6: /* ogl (=-1 or =1) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 7: /* or (=2) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(2), l1);
        break;
    case 8: /* un (<2) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(2), l1);
        break;
    case 9: /* ueq (=0 or =2) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 10: /* ugt (>0) */
        tcg_gen_brcond_i32(TCG_COND_GT, flag, tcg_const_i32(0), l1);
        break;
    case 11: /* uge (>=0) */
        tcg_gen_brcond_i32(TCG_COND_GE, flag, tcg_const_i32(0), l1);
        break;
    case 12: /* ult (=-1 or =2) */
        tcg_gen_brcond_i32(TCG_COND_GEU, flag, tcg_const_i32(2), l1);
        break;
    case 13: /* ule (!=1) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(1), l1);
        break;
    case 14: /* ne (!=0) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 15: /* t */
        tcg_gen_br(l1);
        break;
    }
    gen_jmp_tb(s, 0, s->pc);
    gen_set_label(l1);
    gen_jmp_tb(s, 1, addr + offset);
}

DISAS_INSN(frestore)
{
    /* TODO: Implement frestore.  */
    qemu_assert(0, "FRESTORE not implemented");
}

DISAS_INSN(fsave)
{
    /* TODO: Implement fsave.  */
    qemu_assert(0, "FSAVE not implemented");
}

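/* Extract the upper or lower 16-bit operand half for a word-sized MAC
   operation, honouring the current MACSR mode: fractional operands keep
   the value in the high half, signed and unsigned integer operands are
   sign- or zero-extended from the selected half.  */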
static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
{
    TCGv tmp = gen_new_qreg(QMODE_I32);
    if (s->env->macsr & MACSR_FI) {
        if (upper)
            tcg_gen_andi_i32(tmp, val, 0xffff0000);
        else
            tcg_gen_shli_i32(tmp, val, 16);
    } else if (s->env->macsr & MACSR_SU) {
        if (upper)
            tcg_gen_sari_i32(tmp, val, 16);
        else
            tcg_gen_ext16s_i32(tmp, val);
    } else {
        if (upper)
            tcg_gen_shri_i32(tmp, val, 16);
        else
            tcg_gen_ext16u_i32(tmp, val);
    }
    return tmp;
}

static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}

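/* Multiply-accumulate.  Bits 4-5 of the opcode select an optional
   parallel memory load; the accumulator, operand size and shift are
   taken from the opcode and the extension word, and dual accumulation
   (extension word bits 0-1) requires the EMAC_B feature.  */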
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    if (IS_NULL_QREG(s->mactmp))
        s->mactmp = tcg_temp_new(TCG_TYPE_I64);

    ext = lduw_code(s->pc);
    s->pc += 2;

    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(s, insn, OS_LONG);
        addr = gen_new_qreg(QMODE_I32);
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /* Load the value now to ensure correct exception behavior.
           Perform writeback after reading the MAC inputs.  */
        loadval = gen_load(s, OS_LONG, addr, 0);

        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = gen_new_qreg(QMODE_I32);
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = gen_new_qreg(QMODE_I32);
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = gen_new_qreg(QMODE_I32);
        gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = gen_new_qreg(QMODE_I32);
            gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /* FIXME: Should address writeback happen with the masked or
           unmasked value?  */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}

DISAS_INSN(from_mac)
{
    TCGv rx;
    TCGv acc;
    int accnum;

    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    if (s->env->macsr & MACSR_FI) {
        gen_helper_get_macf(cpu_env, rx, acc);
    } else if ((s->env->macsr & MACSR_OMC) == 0) {
        tcg_gen_trunc_i64_i32(rx, acc);
    } else if (s->env->macsr & MACSR_SU) {
        gen_helper_get_macs(rx, acc);
    } else {
        gen_helper_get_macu(rx, acc);
    }
    if (insn & 0x40) {
        tcg_gen_movi_i64(acc, 0);
        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    }
}

DISAS_INSN(move_mac)
{
    /* FIXME: This can be done without a helper.  */
    int src;
    TCGv dest;
    src = insn & 3;
    dest = tcg_const_i32((insn >> 9) & 3);
    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, dest);
}

DISAS_INSN(from_macsr)
{
    TCGv reg;

    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MACSR);
}

DISAS_INSN(from_mask)
{
    TCGv reg;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
}

DISAS_INSN(from_mext)
{
    TCGv reg;
    TCGv acc;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_get_mac_extf(reg, cpu_env, acc);
    else
        gen_helper_get_mac_exti(reg, cpu_env, acc);
}

DISAS_INSN(macsr_to_ccr)
{
    tcg_gen_movi_i32(QREG_CC_X, 0);
    tcg_gen_andi_i32(QREG_CC_DEST, QREG_MACSR, 0xf);
    s->cc_op = CC_OP_FLAGS;
}

DISAS_INSN(to_mac)
{
    TCGv acc;
    TCGv val;
    int accnum;
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    SRC_EA(val, OS_LONG, 0, NULL);
    if (s->env->macsr & MACSR_FI) {
        tcg_gen_ext_i32_i64(acc, val);
        tcg_gen_shli_i64(acc, acc, 8);
    } else if (s->env->macsr & MACSR_SU) {
        tcg_gen_ext_i32_i64(acc, val);
    } else {
        tcg_gen_extu_i32_i64(acc, val);
    }
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
}

DISAS_INSN(to_macsr)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    gen_helper_set_macsr(cpu_env, val);
    gen_lookup_tb(s);
}

DISAS_INSN(to_mask)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
}

DISAS_INSN(to_mext)
{
    TCGv val;
    TCGv acc;
    SRC_EA(val, OS_LONG, 0, NULL);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_set_mac_extf(cpu_env, val, acc);
    else if (s->env->macsr & MACSR_SU)
        gen_helper_set_mac_exts(cpu_env, val, acc);
    else
        gen_helper_set_mac_extu(cpu_env, val, acc);
}

static disas_proc opcode_table[65536];

static void
register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
{
  int i;
  int from;
  int to;

  /* Sanity check.  All set bits must be included in the mask.  */
  if (opcode & ~mask) {
      fprintf(stderr,
              "qemu internal error: bogus opcode definition %04x/%04x\n",
              opcode, mask);
      abort();
  }
  /* This could probably be cleverer.  For now just optimize the case where
     the top bits are known.  */
  /* Find the first zero bit in the mask.  */
  i = 0x8000;
  while ((i & mask) != 0)
      i >>= 1;
  /* Iterate over all combinations of this and lower bits.  */
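  /* For example, TRAP is registered as opcode 4e40 with mask fff0, so the
     sixteen entries 4e40..4e4f all point at the same handler.  */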
  if (i == 0)
      i = 1;
  else
      i <<= 1;
  from = opcode & ~(i - 1);
  to = from + i;
  for (i = from; i < to; i++) {
      if ((i & mask) == opcode)
          opcode_table[i] = proc;
  }
}

/* Register m68k opcode handlers.  Order is important.
   Later insns override earlier ones.  */
void register_m68k_insns (CPUM68KState *env)
{
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        register_opcode(disas_##name, 0x##opcode, 0x##mask); \
    } while(0)
    INSN(undef,     0000, 0000, CF_ISA_A);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
    INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(bitop_im,  0800, ffc0, CF_ISA_A);
    INSN(bitop_im,  0840, ffc0, CF_ISA_A);
    INSN(bitop_im,  0880, ffc0, CF_ISA_A);
    INSN(bitop_im,  08c0, ffc0, CF_ISA_A);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(move,      1000, f000, CF_ISA_A);
    INSN(move,      2000, f000, CF_ISA_A);
    INSN(move,      3000, f000, CF_ISA_A);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(lea,       41c0, f1c0, CF_ISA_A);
    INSN(clr,       4200, ff00, CF_ISA_A);
    INSN(undef,     42c0, ffc0, CF_ISA_A);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
    INSN(pea,       4840, ffc0, CF_ISA_A);
    INSN(swap,      4840, fff8, CF_ISA_A);
    INSN(movem,     48c0, fbc0, CF_ISA_A);
    INSN(ext,       4880, fff8, CF_ISA_A);
    INSN(ext,       48c0, fff8, CF_ISA_A);
    INSN(ext,       49c0, fff8, CF_ISA_A);
    INSN(tst,       4a00, ff00, CF_ISA_A);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(halt,      4ac8, ffff, CF_ISA_A);
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    INSN(illegal,   4afc, ffff, CF_ISA_A);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    INSN(trap,      4e40, fff0, CF_ISA_A);
    INSN(link,      4e50, fff8, CF_ISA_A);
    INSN(unlk,      4e58, fff8, CF_ISA_A);
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(nop,       4e71, ffff, CF_ISA_A);
    INSN(stop,      4e72, ffff, CF_ISA_A);
    INSN(rte,       4e73, ffff, CF_ISA_A);
    INSN(rts,       4e75, ffff, CF_ISA_A);
    INSN(movec,     4e7b, ffff, CF_ISA_A);
    INSN(jump,      4e80, ffc0, CF_ISA_A);
    INSN(jump,      4ec0, ffc0, CF_ISA_A);
    INSN(addsubq,   5180, f1c0, CF_ISA_A);
    INSN(scc,       50c0, f0f8, CF_ISA_A);
    INSN(addsubq,   5080, f1c0, CF_ISA_A);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    INSN(branch,    6000, f000, CF_ISA_A);
    /* Disable long branch instructions, then add back the ones we want.  */
    INSN(undef,     60ff, f0ff, CF_ISA_A); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);

    INSN(moveq,     7000, f100, CF_ISA_A);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    INSN(or,        8000, f000, CF_ISA_A);
    INSN(divw,      80c0, f0c0, CF_ISA_A);
    INSN(addsub,    9000, f000, CF_ISA_A);
    INSN(subx,      9180, f1f8, CF_ISA_A);
    INSN(suba,      91c0, f1c0, CF_ISA_A);

    INSN(undef_mac, a000, f000, CF_ISA_A);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    INSN(and,       c000, f000, CF_ISA_A);
    INSN(mulw,      c0c0, f0c0, CF_ISA_A);
    INSN(addsub,    d000, f000, CF_ISA_A);
    INSN(addx,      d180, f1f8, CF_ISA_A);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(undef_fpu, f000, f000, CF_ISA_A);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f340, ffc0, CF_FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#undef INSN
}

/* ??? Some of this implementation is not exception safe.  We should always
   write back the result to memory before setting the condition codes.  */
static void disas_m68k_insn(CPUState * env, DisasContext *s)
{
    uint16_t insn;

    insn = lduw_code(s->pc);
    s->pc += 2;

    opcode_table[insn](s, insn);
}

/* generate intermediate code for basic block 'tb'.  */
static inline void
gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
                               int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    uint16_t *gen_opc_end;
    int j, lj;
    target_ulong pc_start;
    int pc_offset;
    int last_cc_op;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;

    dc->tb = tb;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->env = env;
    dc->is_jmp = DISAS_NEXT;
    dc->pc = pc_start;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->fpcr = env->fpcr;
    dc->user = (env->sr & SR_S) == 0;
    dc->is_mem = 0;
    dc->mactmp = NULL_QREG;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    do {
        pc_offset = dc->pc - pc_start;
        gen_throws_exception = NULL;
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    gen_exception(dc, dc->pc, EXCP_DEBUG);
                    dc->is_jmp = DISAS_JUMP;
                    break;
                }
            }
            if (dc->is_jmp)
                break;
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = dc->pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_cc_op = dc->cc_op;
        dc->insn_pc = dc->pc;
        disas_m68k_insn(env, dc);
        num_insns++;

        /* Terminate the TB on memory ops if watchpoints are present.  */
        /* FIXME: This should be replaced by the deterministic execution
         * IRQ raising bits.  */
        if (dc->is_mem && env->nb_watchpoints)
            break;
    } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
             !env->singlestep_enabled &&
             (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (unlikely(env->singlestep_enabled)) {
        /* Make sure the pc is updated, and raise a debug exception.  */
        if (!dc->is_jmp) {
            gen_flush_cc_op(dc);
            tcg_gen_movi_i32(QREG_PC, dc->pc);
        }
        gen_helper_raise_exception(tcg_const_i32(EXCP_DEBUG));
    } else {
        switch(dc->is_jmp) {
        case DISAS_NEXT:
            gen_flush_cc_op(dc);
            gen_jmp_tb(dc, 0, dc->pc);
            break;
        default:
        case DISAS_JUMP:
        case DISAS_UPDATE:
            gen_flush_cc_op(dc);
            /* indicate that the hash table must be used to find the next TB */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* nothing more to generate */
            break;
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, dc->pc - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = dc->pc - pc_start;
        tb->icount = num_insns;
    }

    //optimize_flags();
    //expand_target_qops();
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

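/* The same, but with instruction boundaries recorded so the guest PC of a
   faulting instruction can be recovered (see gen_pc_load).  */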
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void cpu_dump_state(CPUState *env, FILE *f,
                    int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                    int flags)
{
    int i;
    uint16_t sr;
    CPU_DoubleU u;
    for (i = 0; i < 8; i++)
      {
        u.d = env->fregs[i];
        cpu_fprintf (f, "D%d = %08x   A%d = %08x   F%d = %08x%08x (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, u.l.upper, u.l.lower, *(double *)&u.d);
      }
    cpu_fprintf (f, "PC = %08x   ", env->pc);
    sr = env->sr;
    cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
    cpu_fprintf (f, "FPRESULT = %12g\n", *(double *)&env->fp_result);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}