target-m68k/translate.c @ bf20dc07

1
/*
2
 *  m68k translation
3
 *
4
 *  Copyright (c) 2005-2007 CodeSourcery
5
 *  Written by Paul Brook
6
 *
7
 * This library is free software; you can redistribute it and/or
8
 * modify it under the terms of the GNU Lesser General Public
9
 * License as published by the Free Software Foundation; either
10
 * version 2 of the License, or (at your option) any later version.
11
 *
12
 * This library is distributed in the hope that it will be useful,
13
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
 * General Public License for more details.
16
 *
17
 * You should have received a copy of the GNU Lesser General Public
18
 * License along with this library; if not, write to the Free Software
19
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
 */
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26
#include <assert.h>
27

    
28
#include "config.h"
29
#include "cpu.h"
30
#include "exec-all.h"
31
#include "disas.h"
32
#include "tcg-op.h"
33

    
34
#define GEN_HELPER 1
35
#include "helpers.h"
36

    
37
//#define DEBUG_DISPATCH 1
38

    
39
#define DEFO32(name, offset) static TCGv QREG_##name;
40
#define DEFO64(name, offset) static TCGv QREG_##name;
41
#define DEFF64(name, offset) static TCGv QREG_##name;
42
#include "qregs.def"
43
#undef DEFO32
44
#undef DEFO64
45
#undef DEFF64
46

    
47
static TCGv cpu_env;
48

    
49
static char cpu_reg_names[3*8*3 + 5*4];
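/* Space for 8 "Dn", 8 "An" and 8 "Fn" names (3 bytes each including the NUL)
   plus 4 "ACCn" names (5 bytes each); filled in by m68k_tcg_init.  */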
50
static TCGv cpu_dregs[8];
51
static TCGv cpu_aregs[8];
52
static TCGv cpu_fregs[8];
53
static TCGv cpu_macc[4];
54

    
55
#define DREG(insn, pos) cpu_dregs[((insn) >> (pos)) & 7]
56
#define AREG(insn, pos) cpu_aregs[((insn) >> (pos)) & 7]
57
#define FREG(insn, pos) cpu_fregs[((insn) >> (pos)) & 7]
58
#define MACREG(acc) cpu_macc[acc]
59
#define QREG_SP cpu_aregs[7]
60

    
61
static TCGv NULL_QREG;
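/* Dummy value returned for addressing modes that do not yield a usable
   address; callers test it with IS_NULL_QREG and raise an address fault.  */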
62
#define IS_NULL_QREG(t) (GET_TCGV(t) == GET_TCGV(NULL_QREG))
63
/* Used to distinguish stores from bad addressing modes.  */
64
static TCGv store_dummy;
65

    
66
#include "gen-icount.h"
67

    
68
void m68k_tcg_init(void)
69
{
70
    char *p;
71
    int i;
72

    
73
#define DEFO32(name,  offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, offsetof(CPUState, offset), #name);
74
#define DEFO64(name,  offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0, offsetof(CPUState, offset), #name);
75
#define DEFF64(name,  offset) DEFO64(name, offset)
76
#include "qregs.def"
77
#undef DEFO32
78
#undef DEFO64
79
#undef DEFF64
80

    
81
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
82

    
83
    p = cpu_reg_names;
84
    for (i = 0; i < 8; i++) {
85
        sprintf(p, "D%d", i);
86
        cpu_dregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
87
                                          offsetof(CPUM68KState, dregs[i]), p);
88
        p += 3;
89
        sprintf(p, "A%d", i);
90
        cpu_aregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
91
                                          offsetof(CPUM68KState, aregs[i]), p);
92
        p += 3;
93
        sprintf(p, "F%d", i);
94
        cpu_fregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
95
                                          offsetof(CPUM68KState, fregs[i]), p);
96
        p += 3;
97
    }
98
    for (i = 0; i < 4; i++) {
99
        sprintf(p, "ACC%d", i);
100
        cpu_macc[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
101
                                         offsetof(CPUM68KState, macc[i]), p);
102
        p += 5;
103
    }
104

    
105
    NULL_QREG = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -4, "NULL");
106
    store_dummy = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -8, "NULL");
107

    
108
#define DEF_HELPER(name, ret, args) \
109
    tcg_register_helper(HELPER(name), #name);
110
#include "helpers.h"
111
}
112

    
113
static inline void qemu_assert(int cond, const char *msg)
114
{
115
    if (!cond) {
116
        fprintf (stderr, "badness: %s\n", msg);
117
        abort();
118
    }
119
}
120

    
121
/* internal defines */
122
typedef struct DisasContext {
123
    CPUM68KState *env;
124
    target_ulong insn_pc; /* Start of the current instruction.  */
125
    target_ulong pc;
126
    int is_jmp;
127
    int cc_op;
128
    int user;
129
    uint32_t fpcr;
130
    struct TranslationBlock *tb;
131
    int singlestep_enabled;
132
    int is_mem;
133
    TCGv mactmp;
134
} DisasContext;
135

    
136
#define DISAS_JUMP_NEXT 4
137

    
138
#if defined(CONFIG_USER_ONLY)
139
#define IS_USER(s) 1
140
#else
141
#define IS_USER(s) s->user
142
#endif
143

    
144
/* XXX: move that elsewhere */
145
/* ??? Fix exceptions.  */
146
static void *gen_throws_exception;
147
#define gen_last_qop NULL
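/* gen_last_qop is always NULL, so gen_throws_exception merely records that
   the preceding operation was a memory access that may fault (apparently
   left over from an older code generator).  */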
148

    
149
extern FILE *logfile;
150
extern int loglevel;
151

    
152
#define OS_BYTE 0
153
#define OS_WORD 1
154
#define OS_LONG 2
155
#define OS_SINGLE 4
156
#define OS_DOUBLE 5
157

    
158
typedef void (*disas_proc)(DisasContext *, uint16_t);
159

    
160
#ifdef DEBUG_DISPATCH
161
#define DISAS_INSN(name) \
162
  static void real_disas_##name (DisasContext *s, uint16_t insn); \
163
  static void disas_##name (DisasContext *s, uint16_t insn) { \
164
    if (logfile) fprintf(logfile, "Dispatch " #name "\n"); \
165
    real_disas_##name(s, insn); } \
166
  static void real_disas_##name (DisasContext *s, uint16_t insn)
167
#else
168
#define DISAS_INSN(name) \
169
  static void disas_##name (DisasContext *s, uint16_t insn)
170
#endif
171

    
172
/* FIXME: Remove this.  */
173
#define gen_im32(val) tcg_const_i32(val)
174

    
175
/* Fake floating point.  */
176
#define TCG_TYPE_F32 TCG_TYPE_I32
177
#define TCG_TYPE_F64 TCG_TYPE_I64
178
#define tcg_gen_mov_f64 tcg_gen_mov_i64
179
#define tcg_gen_qemu_ldf32 tcg_gen_qemu_ld32u
180
#define tcg_gen_qemu_ldf64 tcg_gen_qemu_ld64
181
#define tcg_gen_qemu_stf32 tcg_gen_qemu_st32
182
#define tcg_gen_qemu_stf64 tcg_gen_qemu_st64
183
#define gen_helper_pack_32_f32 tcg_gen_mov_i32
184
#define gen_helper_pack_f32_32 tcg_gen_mov_i32
185

    
186
#define QMODE_I32 TCG_TYPE_I32
187
#define QMODE_I64 TCG_TYPE_I64
188
#define QMODE_F32 TCG_TYPE_F32
189
#define QMODE_F64 TCG_TYPE_F64
190
static inline TCGv gen_new_qreg(int mode)
191
{
192
    return tcg_temp_new(mode);
193
}
194

    
195
/* Generate a load from the specified address.  Narrow values are
196
   sign extended to full register width.  */
197
static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
198
{
199
    TCGv tmp;
200
    int index = IS_USER(s);
201
    s->is_mem = 1;
202
    switch(opsize) {
203
    case OS_BYTE:
204
        tmp = gen_new_qreg(QMODE_I32);
205
        if (sign)
206
            tcg_gen_qemu_ld8s(tmp, addr, index);
207
        else
208
            tcg_gen_qemu_ld8u(tmp, addr, index);
209
        break;
210
    case OS_WORD:
211
        tmp = gen_new_qreg(QMODE_I32);
212
        if (sign)
213
            tcg_gen_qemu_ld16s(tmp, addr, index);
214
        else
215
            tcg_gen_qemu_ld16u(tmp, addr, index);
216
        break;
217
    case OS_LONG:
218
        tmp = gen_new_qreg(QMODE_I32);
219
        tcg_gen_qemu_ld32u(tmp, addr, index);
220
        break;
221
    case OS_SINGLE:
222
        tmp = gen_new_qreg(QMODE_F32);
223
        tcg_gen_qemu_ldf32(tmp, addr, index);
224
        break;
225
    case OS_DOUBLE:
226
        tmp  = gen_new_qreg(QMODE_F64);
227
        tcg_gen_qemu_ldf64(tmp, addr, index);
228
        break;
229
    default:
230
        qemu_assert(0, "bad load size");
231
    }
232
    gen_throws_exception = gen_last_qop;
233
    return tmp;
234
}
235

    
236
/* Generate a store.  */
237
static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
238
{
239
    int index = IS_USER(s);
240
    s->is_mem = 1;
241
    switch(opsize) {
242
    case OS_BYTE:
243
        tcg_gen_qemu_st8(val, addr, index);
244
        break;
245
    case OS_WORD:
246
        tcg_gen_qemu_st16(val, addr, index);
247
        break;
248
    case OS_LONG:
249
        tcg_gen_qemu_st32(val, addr, index);
250
        break;
251
    case OS_SINGLE:
252
        tcg_gen_qemu_stf32(val, addr, index);
253
        break;
254
    case OS_DOUBLE:
255
        tcg_gen_qemu_stf64(val, addr, index);
256
        break;
257
    default:
258
        qemu_assert(0, "bad store size");
259
    }
260
    gen_throws_exception = gen_last_qop;
261
}
262

    
263
typedef enum {
264
    EA_STORE,
265
    EA_LOADU,
266
    EA_LOADS
267
} ea_what;
268

    
269
/* Generate an unsigned load if VAL is 0 a signed load if val is -1,
270
   otherwise generate a store.  */
271
static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
272
                     ea_what what)
273
{
274
    if (what == EA_STORE) {
275
        gen_store(s, opsize, addr, val);
276
        return store_dummy;
277
    } else {
278
        return gen_load(s, opsize, addr, what == EA_LOADS);
279
    }
280
}
281

    
282
/* Read a 32-bit immediate constant.  */
283
static inline uint32_t read_im32(DisasContext *s)
284
{
285
    uint32_t im;
286
    im = ((uint32_t)lduw_code(s->pc)) << 16;
287
    s->pc += 2;
288
    im |= lduw_code(s->pc);
289
    s->pc += 2;
290
    return im;
291
}
292

    
293
/* Calculate an address index.  */
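/* Bit 15 of the extension word selects An vs Dn, bits 14-12 the index
   register, bit 11 a long vs sign-extended word index, and bits 10-9 a
   scale factor of 1/2/4/8.  */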
294
static TCGv gen_addr_index(uint16_t ext, TCGv tmp)
295
{
296
    TCGv add;
297
    int scale;
298

    
299
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
300
    if ((ext & 0x800) == 0) {
301
        tcg_gen_ext16s_i32(tmp, add);
302
        add = tmp;
303
    }
304
    scale = (ext >> 9) & 3;
305
    if (scale != 0) {
306
        tcg_gen_shli_i32(tmp, add, scale);
307
        add = tmp;
308
    }
309
    return add;
310
}
311

    
312
/* Handle a base + index + displacement effective address.
313
   A NULL_QREG base means pc-relative.  */
314
static TCGv gen_lea_indexed(DisasContext *s, int opsize, TCGv base)
315
{
316
    uint32_t offset;
317
    uint16_t ext;
318
    TCGv add;
319
    TCGv tmp;
320
    uint32_t bd, od;
321

    
322
    offset = s->pc;
323
    ext = lduw_code(s->pc);
324
    s->pc += 2;
325

    
326
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
327
        return NULL_QREG;
328

    
329
    if (ext & 0x100) {
330
        /* full extension word format */
331
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
332
            return NULL_QREG;
333

    
334
        if ((ext & 0x30) > 0x10) {
335
            /* base displacement */
336
            if ((ext & 0x30) == 0x20) {
337
                bd = (int16_t)lduw_code(s->pc);
338
                s->pc += 2;
339
            } else {
340
                bd = read_im32(s);
341
            }
342
        } else {
343
            bd = 0;
344
        }
345
        tmp = gen_new_qreg(QMODE_I32);
346
        if ((ext & 0x44) == 0) {
347
            /* pre-index */
348
            add = gen_addr_index(ext, tmp);
349
        } else {
350
            add = NULL_QREG;
351
        }
352
        if ((ext & 0x80) == 0) {
353
            /* base not suppressed */
354
            if (IS_NULL_QREG(base)) {
355
                base = gen_im32(offset + bd);
356
                bd = 0;
357
            }
358
            if (!IS_NULL_QREG(add)) {
359
                tcg_gen_add_i32(tmp, add, base);
360
                add = tmp;
361
            } else {
362
                add = base;
363
            }
364
        }
365
        if (!IS_NULL_QREG(add)) {
366
            if (bd != 0) {
367
                tcg_gen_addi_i32(tmp, add, bd);
368
                add = tmp;
369
            }
370
        } else {
371
            add = gen_im32(bd);
372
        }
373
        if ((ext & 3) != 0) {
374
            /* memory indirect */
375
            base = gen_load(s, OS_LONG, add, 0);
376
            if ((ext & 0x44) == 4) {
377
                add = gen_addr_index(ext, tmp);
378
                tcg_gen_add_i32(tmp, add, base);
379
                add = tmp;
380
            } else {
381
                add = base;
382
            }
383
            if ((ext & 3) > 1) {
384
                /* outer displacement */
385
                if ((ext & 3) == 2) {
386
                    od = (int16_t)lduw_code(s->pc);
387
                    s->pc += 2;
388
                } else {
389
                    od = read_im32(s);
390
                }
391
            } else {
392
                od = 0;
393
            }
394
            if (od != 0) {
395
                tcg_gen_addi_i32(tmp, add, od);
396
                add = tmp;
397
            }
398
        }
399
    } else {
400
        /* brief extension word format */
401
        tmp = gen_new_qreg(QMODE_I32);
402
        add = gen_addr_index(ext, tmp);
403
        if (!IS_NULL_QREG(base)) {
404
            tcg_gen_add_i32(tmp, add, base);
405
            if ((int8_t)ext)
406
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
407
        } else {
408
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
409
        }
410
        add = tmp;
411
    }
412
    return add;
413
}
414

    
415
/* Update the CPU env CC_OP state.  */
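/* Condition codes are evaluated lazily: cc_op describes how QREG_CC_DEST and
   QREG_CC_SRC are to be interpreted, and gen_flush_flags() converts them into
   real CCR bits when an instruction needs them.  */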
416
static inline void gen_flush_cc_op(DisasContext *s)
417
{
418
    if (s->cc_op != CC_OP_DYNAMIC)
419
        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
420
}
421

    
422
/* Evaluate all the CC flags.  */
423
static inline void gen_flush_flags(DisasContext *s)
424
{
425
    if (s->cc_op == CC_OP_FLAGS)
426
        return;
427
    gen_flush_cc_op(s);
428
    gen_helper_flush_flags(cpu_env, QREG_CC_OP);
429
    s->cc_op = CC_OP_FLAGS;
430
}
431

    
432
static void gen_logic_cc(DisasContext *s, TCGv val)
433
{
434
    tcg_gen_mov_i32(QREG_CC_DEST, val);
435
    s->cc_op = CC_OP_LOGIC;
436
}
437

    
438
static void gen_update_cc_add(TCGv dest, TCGv src)
439
{
440
    tcg_gen_mov_i32(QREG_CC_DEST, dest);
441
    tcg_gen_mov_i32(QREG_CC_SRC, src);
442
}
443

    
444
static inline int opsize_bytes(int opsize)
445
{
446
    switch (opsize) {
447
    case OS_BYTE: return 1;
448
    case OS_WORD: return 2;
449
    case OS_LONG: return 4;
450
    case OS_SINGLE: return 4;
451
    case OS_DOUBLE: return 8;
452
    default:
453
        qemu_assert(0, "bad operand size");
454
    }
455
}
456

    
457
/* Assign a value to a register.  If the width is less than the register width
458
   only the low part of the register is set.  */
459
static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
460
{
461
    TCGv tmp;
462
    switch (opsize) {
463
    case OS_BYTE:
464
        tcg_gen_andi_i32(reg, reg, 0xffffff00);
465
        tmp = gen_new_qreg(QMODE_I32);
466
        tcg_gen_ext8u_i32(tmp, val);
467
        tcg_gen_or_i32(reg, reg, tmp);
468
        break;
469
    case OS_WORD:
470
        tcg_gen_andi_i32(reg, reg, 0xffff0000);
471
        tmp = gen_new_qreg(QMODE_I32);
472
        tcg_gen_ext16u_i32(tmp, val);
473
        tcg_gen_or_i32(reg, reg, tmp);
474
        break;
475
    case OS_LONG:
476
        tcg_gen_mov_i32(reg, val);
477
        break;
478
    case OS_SINGLE:
479
        gen_helper_pack_32_f32(reg, val);
480
        break;
481
    default:
482
        qemu_assert(0, "Bad operand size");
483
        break;
484
    }
485
}
486

    
487
/* Sign or zero extend a value.  */
488
static inline TCGv gen_extend(TCGv val, int opsize, int sign)
489
{
490
    TCGv tmp;
491

    
492
    switch (opsize) {
493
    case OS_BYTE:
494
        tmp = gen_new_qreg(QMODE_I32);
495
        if (sign)
496
            tcg_gen_ext8s_i32(tmp, val);
497
        else
498
            tcg_gen_ext8u_i32(tmp, val);
499
        break;
500
    case OS_WORD:
501
        tmp = gen_new_qreg(QMODE_I32);
502
        if (sign)
503
            tcg_gen_ext16s_i32(tmp, val);
504
        else
505
            tcg_gen_ext16u_i32(tmp, val);
506
        break;
507
    case OS_LONG:
508
        tmp = val;
509
        break;
510
    case OS_SINGLE:
511
        tmp = gen_new_qreg(QMODE_F32);
512
        gen_helper_pack_f32_32(tmp, val);
513
        break;
514
    default:
515
        qemu_assert(0, "Bad operand size");
516
    }
517
    return tmp;
518
}
519

    
520
/* Generate code for an "effective address".  Does not adjust the base
521
   register for autoincrememnt addressing modes.  */
522
static TCGv gen_lea(DisasContext *s, uint16_t insn, int opsize)
523
{
524
    TCGv reg;
525
    TCGv tmp;
526
    uint16_t ext;
527
    uint32_t offset;
528

    
529
    switch ((insn >> 3) & 7) {
530
    case 0: /* Data register direct.  */
531
    case 1: /* Address register direct.  */
532
        return NULL_QREG;
533
    case 2: /* Indirect register */
534
    case 3: /* Indirect postincrement.  */
535
        return AREG(insn, 0);
536
    case 4: /* Indirect predecrement.  */
537
        reg = AREG(insn, 0);
538
        tmp = gen_new_qreg(QMODE_I32);
539
        tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
540
        return tmp;
541
    case 5: /* Indirect displacement.  */
542
        reg = AREG(insn, 0);
543
        tmp = gen_new_qreg(QMODE_I32);
544
        ext = lduw_code(s->pc);
545
        s->pc += 2;
546
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
547
        return tmp;
548
    case 6: /* Indirect index + displacement.  */
549
        reg = AREG(insn, 0);
550
        return gen_lea_indexed(s, opsize, reg);
551
    case 7: /* Other */
552
        switch (insn & 7) {
553
        case 0: /* Absolute short.  */
554
            offset = ldsw_code(s->pc);
555
            s->pc += 2;
556
            return gen_im32(offset);
557
        case 1: /* Absolute long.  */
558
            offset = read_im32(s);
559
            return gen_im32(offset);
560
        case 2: /* pc displacement  */
561
            tmp = gen_new_qreg(QMODE_I32);
562
            offset = s->pc;
563
            offset += ldsw_code(s->pc);
564
            s->pc += 2;
565
            return gen_im32(offset);
566
        case 3: /* pc index+displacement.  */
567
            return gen_lea_indexed(s, opsize, NULL_QREG);
568
        case 4: /* Immediate.  */
569
        default:
570
            return NULL_QREG;
571
        }
572
    }
573
    /* Should never happen.  */
574
    return NULL_QREG;
575
}
576

    
577
/* Helper function for gen_ea. Reuse the computed address between the
578
   read/write operands.  */
579
static inline TCGv gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
580
                              TCGv val, TCGv *addrp, ea_what what)
581
{
582
    TCGv tmp;
583

    
584
    if (addrp && what == EA_STORE) {
585
        tmp = *addrp;
586
    } else {
587
        tmp = gen_lea(s, insn, opsize);
588
        if (IS_NULL_QREG(tmp))
589
            return tmp;
590
        if (addrp)
591
            *addrp = tmp;
592
    }
593
    return gen_ldst(s, opsize, tmp, val, what);
594
}
595

    
596
/* Generate code to load/store a value into/from an EA.  If WHAT is EA_STORE
597
   this is a write, otherwise it is a read (EA_LOADS sign extends,
598
   EA_LOADU zero extends).  ADDRP is non-null for readwrite operands.  */
599
static TCGv gen_ea(DisasContext *s, uint16_t insn, int opsize, TCGv val,
600
                   TCGv *addrp, ea_what what)
601
{
602
    TCGv reg;
603
    TCGv result;
604
    uint32_t offset;
605

    
606
    switch ((insn >> 3) & 7) {
607
    case 0: /* Data register direct.  */
608
        reg = DREG(insn, 0);
609
        if (what == EA_STORE) {
610
            gen_partset_reg(opsize, reg, val);
611
            return store_dummy;
612
        } else {
613
            return gen_extend(reg, opsize, what == EA_LOADS);
614
        }
615
    case 1: /* Address register direct.  */
616
        reg = AREG(insn, 0);
617
        if (what == EA_STORE) {
618
            tcg_gen_mov_i32(reg, val);
619
            return store_dummy;
620
        } else {
621
            return gen_extend(reg, opsize, what == EA_LOADS);
622
        }
623
    case 2: /* Indirect register */
624
        reg = AREG(insn, 0);
625
        return gen_ldst(s, opsize, reg, val, what);
626
    case 3: /* Indirect postincrement.  */
627
        reg = AREG(insn, 0);
628
        result = gen_ldst(s, opsize, reg, val, what);
629
        /* ??? This is not exception safe.  The instruction may still
630
           fault after this point.  */
631
        if (what == EA_STORE || !addrp)
632
            tcg_gen_addi_i32(reg, reg, opsize_bytes(opsize));
633
        return result;
634
    case 4: /* Indirect predecrement.  */
635
        {
636
            TCGv tmp;
637
            if (addrp && what == EA_STORE) {
638
                tmp = *addrp;
639
            } else {
640
                tmp = gen_lea(s, insn, opsize);
641
                if (IS_NULL_QREG(tmp))
642
                    return tmp;
643
                if (addrp)
644
                    *addrp = tmp;
645
            }
646
            result = gen_ldst(s, opsize, tmp, val, what);
647
            /* ??? This is not exception safe.  The instruction may still
648
               fault after this point.  */
649
            if (what == EA_STORE || !addrp) {
650
                reg = AREG(insn, 0);
651
                tcg_gen_mov_i32(reg, tmp);
652
            }
653
        }
654
        return result;
655
    case 5: /* Indirect displacement.  */
656
    case 6: /* Indirect index + displacement.  */
657
        return gen_ea_once(s, insn, opsize, val, addrp, what);
658
    case 7: /* Other */
659
        switch (insn & 7) {
660
        case 0: /* Absolute short.  */
661
        case 1: /* Absolute long.  */
662
        case 2: /* pc displacement  */
663
        case 3: /* pc index+displacement.  */
664
            return gen_ea_once(s, insn, opsize, val, addrp, what);
665
        case 4: /* Immediate.  */
666
            /* Sign extend values for consistency.  */
667
            switch (opsize) {
668
            case OS_BYTE:
669
                if (what == EA_LOADS)
670
                    offset = ldsb_code(s->pc + 1);
671
                else
672
                    offset = ldub_code(s->pc + 1);
673
                s->pc += 2;
674
                break;
675
            case OS_WORD:
676
                if (what == EA_LOADS)
677
                    offset = ldsw_code(s->pc);
678
                else
679
                    offset = lduw_code(s->pc);
680
                s->pc += 2;
681
                break;
682
            case OS_LONG:
683
                offset = read_im32(s);
684
                break;
685
            default:
686
                qemu_assert(0, "Bad immediate operand");
687
            }
688
            return tcg_const_i32(offset);
689
        default:
690
            return NULL_QREG;
691
        }
692
    }
693
    /* Should never happen.  */
694
    return NULL_QREG;
695
}
696

    
697
/* This generates a conditional branch, clobbering all temporaries.  */
698
static void gen_jmpcc(DisasContext *s, int cond, int l1)
699
{
700
    TCGv tmp;
701

    
702
    /* TODO: Optimize compare/branch pairs rather than always flushing
703
       flag state to CC_OP_FLAGS.  */
704
    gen_flush_flags(s);
705
    switch (cond) {
706
    case 0: /* T */
707
        tcg_gen_br(l1);
708
        break;
709
    case 1: /* F */
710
        break;
711
    case 2: /* HI (!C && !Z) */
712
        tmp = gen_new_qreg(QMODE_I32);
713
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
714
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
715
        break;
716
    case 3: /* LS (C || Z) */
717
        tmp = gen_new_qreg(QMODE_I32);
718
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
719
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
720
        break;
721
    case 4: /* CC (!C) */
722
        tmp = gen_new_qreg(QMODE_I32);
723
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
724
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
725
        break;
726
    case 5: /* CS (C) */
727
        tmp = gen_new_qreg(QMODE_I32);
728
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
729
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
730
        break;
731
    case 6: /* NE (!Z) */
732
        tmp = gen_new_qreg(QMODE_I32);
733
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
734
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
735
        break;
736
    case 7: /* EQ (Z) */
737
        tmp = gen_new_qreg(QMODE_I32);
738
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
739
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
740
        break;
741
    case 8: /* VC (!V) */
742
        tmp = gen_new_qreg(QMODE_I32);
743
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
744
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
745
        break;
746
    case 9: /* VS (V) */
747
        tmp = gen_new_qreg(QMODE_I32);
748
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
749
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
750
        break;
751
    case 10: /* PL (!N) */
752
        tmp = gen_new_qreg(QMODE_I32);
753
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
754
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
755
        break;
756
    case 11: /* MI (N) */
757
        tmp = gen_new_qreg(QMODE_I32);
758
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
759
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
760
        break;
761
    case 12: /* GE (!(N ^ V)) */
762
        tmp = gen_new_qreg(QMODE_I32);
763
        assert(CCF_V == (CCF_N >> 2));
764
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
765
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
766
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
767
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
768
        break;
769
    case 13: /* LT (N ^ V) */
770
        tmp = gen_new_qreg(QMODE_I32);
771
        assert(CCF_V == (CCF_N >> 2));
772
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
773
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
774
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
775
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
776
        break;
777
    case 14: /* GT (!(Z || (N ^ V))) */
778
        tmp = gen_new_qreg(QMODE_I32);
779
        assert(CCF_V == (CCF_N >> 2));
780
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
781
        tcg_gen_shri_i32(tmp, tmp, 2);
782
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
783
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
784
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
785
        break;
786
    case 15: /* LE (Z || (N ^ V)) */
787
        tmp = gen_new_qreg(QMODE_I32);
788
        assert(CCF_V == (CCF_N >> 2));
789
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
790
        tcg_gen_shri_i32(tmp, tmp, 2);
791
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
792
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
793
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
794
        break;
795
    default:
796
        /* Should never happen.  */
797
        abort();
798
    }
799
}
800

    
801
DISAS_INSN(scc)
802
{
803
    int l1;
804
    int cond;
805
    TCGv reg;
806

    
807
    l1 = gen_new_label();
808
    cond = (insn >> 8) & 0xf;
809
    reg = DREG(insn, 0);
810
    tcg_gen_andi_i32(reg, reg, 0xffffff00);
811
    /* This is safe because we modify the reg directly, with no other values
812
       live.  */
813
    gen_jmpcc(s, cond ^ 1, l1);
814
    tcg_gen_ori_i32(reg, reg, 0xff);
815
    gen_set_label(l1);
816
}
817

    
818
/* Force a TB lookup after an instruction that changes the CPU state.  */
819
static void gen_lookup_tb(DisasContext *s)
820
{
821
    gen_flush_cc_op(s);
822
    tcg_gen_movi_i32(QREG_PC, s->pc);
823
    s->is_jmp = DISAS_UPDATE;
824
}
825

    
826
/* Generate a jump to an immediate address.  */
827
static void gen_jmp_im(DisasContext *s, uint32_t dest)
828
{
829
    gen_flush_cc_op(s);
830
    tcg_gen_movi_i32(QREG_PC, dest);
831
    s->is_jmp = DISAS_JUMP;
832
}
833

    
834
/* Generate a jump to the address in qreg DEST.  */
835
static void gen_jmp(DisasContext *s, TCGv dest)
836
{
837
    gen_flush_cc_op(s);
838
    tcg_gen_mov_i32(QREG_PC, dest);
839
    s->is_jmp = DISAS_JUMP;
840
}
841

    
842
static void gen_exception(DisasContext *s, uint32_t where, int nr)
843
{
844
    gen_flush_cc_op(s);
845
    gen_jmp_im(s, where);
846
    gen_helper_raise_exception(tcg_const_i32(nr));
847
}
848

    
849
static inline void gen_addr_fault(DisasContext *s)
850
{
851
    gen_exception(s, s->insn_pc, EXCP_ADDRESS);
852
}
853

    
854
#define SRC_EA(result, opsize, op_sign, addrp) do { \
855
    result = gen_ea(s, insn, opsize, NULL_QREG, addrp, op_sign ? EA_LOADS : EA_LOADU); \
856
    if (IS_NULL_QREG(result)) { \
857
        gen_addr_fault(s); \
858
        return; \
859
    } \
860
    } while (0)
861

    
862
#define DEST_EA(insn, opsize, val, addrp) do { \
863
    TCGv ea_result = gen_ea(s, insn, opsize, val, addrp, EA_STORE); \
864
    if (IS_NULL_QREG(ea_result)) { \
865
        gen_addr_fault(s); \
866
        return; \
867
    } \
868
    } while (0)
869

    
870
/* Generate a jump to an immediate address.  */
871
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
872
{
873
    TranslationBlock *tb;
874

    
875
    tb = s->tb;
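    /* Direct chaining via goto_tb is only used when the destination lies on
       the same page as this TB (or the current PC); other jumps leave
       through the generic exit.  */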
876
    if (__builtin_expect (s->singlestep_enabled, 0)) {
877
        gen_exception(s, dest, EXCP_DEBUG);
878
    } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
879
               (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
880
        tcg_gen_goto_tb(n);
881
        tcg_gen_movi_i32(QREG_PC, dest);
882
        tcg_gen_exit_tb((long)tb + n);
883
    } else {
884
        gen_jmp_im(s, dest);
885
        tcg_gen_exit_tb(0);
886
    }
887
    s->is_jmp = DISAS_TB_JUMP;
888
}
889

    
890
DISAS_INSN(undef_mac)
891
{
892
    gen_exception(s, s->pc - 2, EXCP_LINEA);
893
}
894

    
895
DISAS_INSN(undef_fpu)
896
{
897
    gen_exception(s, s->pc - 2, EXCP_LINEF);
898
}
899

    
900
DISAS_INSN(undef)
901
{
902
    gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
903
    cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
904
              insn, s->pc - 2);
905
}
906

    
907
DISAS_INSN(mulw)
908
{
909
    TCGv reg;
910
    TCGv tmp;
911
    TCGv src;
912
    int sign;
913

    
914
    sign = (insn & 0x100) != 0;
915
    reg = DREG(insn, 9);
916
    tmp = gen_new_qreg(QMODE_I32);
917
    if (sign)
918
        tcg_gen_ext16s_i32(tmp, reg);
919
    else
920
        tcg_gen_ext16u_i32(tmp, reg);
921
    SRC_EA(src, OS_WORD, sign, NULL);
922
    tcg_gen_mul_i32(tmp, tmp, src);
923
    tcg_gen_mov_i32(reg, tmp);
924
    /* Unlike m68k, coldfire always clears the overflow bit.  */
925
    gen_logic_cc(s, tmp);
926
}
927

    
928
DISAS_INSN(divw)
929
{
930
    TCGv reg;
931
    TCGv tmp;
932
    TCGv src;
933
    int sign;
934

    
935
    sign = (insn & 0x100) != 0;
936
    reg = DREG(insn, 9);
937
    if (sign) {
938
        tcg_gen_ext16s_i32(QREG_DIV1, reg);
939
    } else {
940
        tcg_gen_ext16u_i32(QREG_DIV1, reg);
941
    }
942
    SRC_EA(src, OS_WORD, sign, NULL);
943
    tcg_gen_mov_i32(QREG_DIV2, src);
944
    if (sign) {
945
        gen_helper_divs(cpu_env, tcg_const_i32(1));
946
    } else {
947
        gen_helper_divu(cpu_env, tcg_const_i32(1));
948
    }
949

    
950
    tmp = gen_new_qreg(QMODE_I32);
951
    src = gen_new_qreg(QMODE_I32);
952
    tcg_gen_ext16u_i32(tmp, QREG_DIV1);
953
    tcg_gen_shli_i32(src, QREG_DIV2, 16);
954
    tcg_gen_or_i32(reg, tmp, src);
955
    s->cc_op = CC_OP_FLAGS;
956
}
957

    
958
DISAS_INSN(divl)
959
{
960
    TCGv num;
961
    TCGv den;
962
    TCGv reg;
963
    uint16_t ext;
964

    
965
    ext = lduw_code(s->pc);
966
    s->pc += 2;
967
    if (ext & 0x87f8) {
968
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
969
        return;
970
    }
971
    num = DREG(ext, 12);
972
    reg = DREG(ext, 0);
973
    tcg_gen_mov_i32(QREG_DIV1, num);
974
    SRC_EA(den, OS_LONG, 0, NULL);
975
    tcg_gen_mov_i32(QREG_DIV2, den);
976
    if (ext & 0x0800) {
977
        gen_helper_divs(cpu_env, tcg_const_i32(0));
978
    } else {
979
        gen_helper_divu(cpu_env, tcg_const_i32(0));
980
    }
981
    if ((ext & 7) == ((ext >> 12) & 7)) {
982
        /* div */
983
        tcg_gen_mov_i32 (reg, QREG_DIV1);
984
    } else {
985
        /* rem */
986
        tcg_gen_mov_i32 (reg, QREG_DIV2);
987
    }
988
    s->cc_op = CC_OP_FLAGS;
989
}
990

    
991
DISAS_INSN(addsub)
992
{
993
    TCGv reg;
994
    TCGv dest;
995
    TCGv src;
996
    TCGv tmp;
997
    TCGv addr;
998
    int add;
999

    
1000
    add = (insn & 0x4000) != 0;
1001
    reg = DREG(insn, 9);
1002
    dest = gen_new_qreg(QMODE_I32);
1003
    if (insn & 0x100) {
1004
        SRC_EA(tmp, OS_LONG, 0, &addr);
1005
        src = reg;
1006
    } else {
1007
        tmp = reg;
1008
        SRC_EA(src, OS_LONG, 0, NULL);
1009
    }
1010
    if (add) {
1011
        tcg_gen_add_i32(dest, tmp, src);
1012
        gen_helper_xflag_lt(QREG_CC_X, dest, src);
1013
        s->cc_op = CC_OP_ADD;
1014
    } else {
1015
        gen_helper_xflag_lt(QREG_CC_X, tmp, src);
1016
        tcg_gen_sub_i32(dest, tmp, src);
1017
        s->cc_op = CC_OP_SUB;
1018
    }
1019
    gen_update_cc_add(dest, src);
1020
    if (insn & 0x100) {
1021
        DEST_EA(insn, OS_LONG, dest, &addr);
1022
    } else {
1023
        tcg_gen_mov_i32(reg, dest);
1024
    }
1025
}
1026

    
1027

    
1028
/* Reverse the order of the bits in REG.  */
1029
DISAS_INSN(bitrev)
1030
{
1031
    TCGv reg;
1032
    reg = DREG(insn, 0);
1033
    gen_helper_bitrev(reg, reg);
1034
}
1035

    
1036
DISAS_INSN(bitop_reg)
1037
{
1038
    int opsize;
1039
    int op;
1040
    TCGv src1;
1041
    TCGv src2;
1042
    TCGv tmp;
1043
    TCGv addr;
1044
    TCGv dest;
1045

    
1046
    if ((insn & 0x38) != 0)
1047
        opsize = OS_BYTE;
1048
    else
1049
        opsize = OS_LONG;
1050
    op = (insn >> 6) & 3;
1051
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1052
    src2 = DREG(insn, 9);
1053
    dest = gen_new_qreg(QMODE_I32);
1054

    
1055
    gen_flush_flags(s);
1056
    tmp = gen_new_qreg(QMODE_I32);
1057
    if (opsize == OS_BYTE)
1058
        tcg_gen_andi_i32(tmp, src2, 7);
1059
    else
1060
        tcg_gen_andi_i32(tmp, src2, 31);
1061
    src2 = tmp;
1062
    tmp = gen_new_qreg(QMODE_I32);
1063
    tcg_gen_shr_i32(tmp, src1, src2);
1064
    tcg_gen_andi_i32(tmp, tmp, 1);
1065
    tcg_gen_shli_i32(tmp, tmp, 2);
1066
    /* Clear CCF_Z if bit set.  */
1067
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1068
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
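    /* The tested bit was shifted into bit 2, which is CCF_Z: the ori/xor
       pair above sets Z when the bit is clear and clears Z when it is set.  */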
1069

    
1070
    tcg_gen_shl_i32(tmp, tcg_const_i32(1), src2);
1071
    switch (op) {
1072
    case 1: /* bchg */
1073
        tcg_gen_xor_i32(dest, src1, tmp);
1074
        break;
1075
    case 2: /* bclr */
1076
        tcg_gen_not_i32(tmp, tmp);
1077
        tcg_gen_and_i32(dest, src1, tmp);
1078
        break;
1079
    case 3: /* bset */
1080
        tcg_gen_or_i32(dest, src1, tmp);
1081
        break;
1082
    default: /* btst */
1083
        break;
1084
    }
1085
    if (op)
1086
        DEST_EA(insn, opsize, dest, &addr);
1087
}
1088

    
1089
DISAS_INSN(sats)
1090
{
1091
    TCGv reg;
1092
    reg = DREG(insn, 0);
1093
    gen_flush_flags(s);
1094
    gen_helper_sats(reg, reg, QREG_CC_DEST);
1095
    gen_logic_cc(s, reg);
1096
}
1097

    
1098
static void gen_push(DisasContext *s, TCGv val)
1099
{
1100
    TCGv tmp;
1101

    
1102
    tmp = gen_new_qreg(QMODE_I32);
1103
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1104
    gen_store(s, OS_LONG, tmp, val);
1105
    tcg_gen_mov_i32(QREG_SP, tmp);
1106
}
1107

    
1108
DISAS_INSN(movem)
1109
{
1110
    TCGv addr;
1111
    int i;
1112
    uint16_t mask;
1113
    TCGv reg;
1114
    TCGv tmp;
1115
    int is_load;
1116

    
1117
    mask = lduw_code(s->pc);
1118
    s->pc += 2;
1119
    tmp = gen_lea(s, insn, OS_LONG);
1120
    if (IS_NULL_QREG(tmp)) {
1121
        gen_addr_fault(s);
1122
        return;
1123
    }
1124
    addr = gen_new_qreg(QMODE_I32);
1125
    tcg_gen_mov_i32(addr, tmp);
1126
    is_load = ((insn & 0x0400) != 0);
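    /* Mask bits 0-7 select D0-D7, bits 8-15 select A0-A7.  */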
1127
    for (i = 0; i < 16; i++, mask >>= 1) {
1128
        if (mask & 1) {
1129
            if (i < 8)
1130
                reg = DREG(i, 0);
1131
            else
1132
                reg = AREG(i, 0);
1133
            if (is_load) {
1134
                tmp = gen_load(s, OS_LONG, addr, 0);
1135
                tcg_gen_mov_i32(reg, tmp);
1136
            } else {
1137
                gen_store(s, OS_LONG, addr, reg);
1138
            }
1139
            if (mask != 1)
1140
                tcg_gen_addi_i32(addr, addr, 4);
1141
        }
1142
    }
1143
}
1144

    
1145
DISAS_INSN(bitop_im)
1146
{
1147
    int opsize;
1148
    int op;
1149
    TCGv src1;
1150
    uint32_t mask;
1151
    int bitnum;
1152
    TCGv tmp;
1153
    TCGv addr;
1154

    
1155
    if ((insn & 0x38) != 0)
1156
        opsize = OS_BYTE;
1157
    else
1158
        opsize = OS_LONG;
1159
    op = (insn >> 6) & 3;
1160

    
1161
    bitnum = lduw_code(s->pc);
1162
    s->pc += 2;
1163
    if (bitnum & 0xff00) {
1164
        disas_undef(s, insn);
1165
        return;
1166
    }
1167

    
1168
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1169

    
1170
    gen_flush_flags(s);
1171
    if (opsize == OS_BYTE)
1172
        bitnum &= 7;
1173
    else
1174
        bitnum &= 31;
1175
    mask = 1 << bitnum;
1176

    
1177
    tmp = gen_new_qreg(QMODE_I32);
1178
    assert (CCF_Z == (1 << 2));
1179
    if (bitnum > 2)
1180
        tcg_gen_shri_i32(tmp, src1, bitnum - 2);
1181
    else if (bitnum < 2)
1182
        tcg_gen_shli_i32(tmp, src1, 2 - bitnum);
1183
    else
1184
        tcg_gen_mov_i32(tmp, src1);
1185
    tcg_gen_andi_i32(tmp, tmp, CCF_Z);
1186
    /* Clear CCF_Z if bit set.  */
1187
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1188
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1189
    if (op) {
1190
        switch (op) {
1191
        case 1: /* bchg */
1192
            tcg_gen_xori_i32(tmp, src1, mask);
1193
            break;
1194
        case 2: /* bclr */
1195
            tcg_gen_andi_i32(tmp, src1, ~mask);
1196
            break;
1197
        case 3: /* bset */
1198
            tcg_gen_ori_i32(tmp, src1, mask);
1199
            break;
1200
        default: /* btst */
1201
            break;
1202
        }
1203
        DEST_EA(insn, opsize, tmp, &addr);
1204
    }
1205
}
1206

    
1207
DISAS_INSN(arith_im)
1208
{
1209
    int op;
1210
    uint32_t im;
1211
    TCGv src1;
1212
    TCGv dest;
1213
    TCGv addr;
1214

    
1215
    op = (insn >> 9) & 7;
1216
    SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
1217
    im = read_im32(s);
1218
    dest = gen_new_qreg(QMODE_I32);
1219
    switch (op) {
1220
    case 0: /* ori */
1221
        tcg_gen_ori_i32(dest, src1, im);
1222
        gen_logic_cc(s, dest);
1223
        break;
1224
    case 1: /* andi */
1225
        tcg_gen_andi_i32(dest, src1, im);
1226
        gen_logic_cc(s, dest);
1227
        break;
1228
    case 2: /* subi */
1229
        tcg_gen_mov_i32(dest, src1);
1230
        gen_helper_xflag_lt(QREG_CC_X, dest, gen_im32(im));
1231
        tcg_gen_subi_i32(dest, dest, im);
1232
        gen_update_cc_add(dest, gen_im32(im));
1233
        s->cc_op = CC_OP_SUB;
1234
        break;
1235
    case 3: /* addi */
1236
        tcg_gen_mov_i32(dest, src1);
1237
        tcg_gen_addi_i32(dest, dest, im);
1238
        gen_update_cc_add(dest, gen_im32(im));
1239
        gen_helper_xflag_lt(QREG_CC_X, dest, gen_im32(im));
1240
        s->cc_op = CC_OP_ADD;
1241
        break;
1242
    case 5: /* eori */
1243
        tcg_gen_xori_i32(dest, src1, im);
1244
        gen_logic_cc(s, dest);
1245
        break;
1246
    case 6: /* cmpi */
1247
        tcg_gen_mov_i32(dest, src1);
1248
        tcg_gen_subi_i32(dest, dest, im);
1249
        gen_update_cc_add(dest, gen_im32(im));
1250
        s->cc_op = CC_OP_SUB;
1251
        break;
1252
    default:
1253
        abort();
1254
    }
1255
    if (op != 6) {
1256
        DEST_EA(insn, OS_LONG, dest, &addr);
1257
    }
1258
}
1259

    
1260
DISAS_INSN(byterev)
1261
{
1262
    TCGv reg;
1263

    
1264
    reg = DREG(insn, 0);
1265
    tcg_gen_bswap_i32(reg, reg);
1266
}
1267

    
1268
DISAS_INSN(move)
1269
{
1270
    TCGv src;
1271
    TCGv dest;
1272
    int op;
1273
    int opsize;
1274

    
1275
    switch (insn >> 12) {
1276
    case 1: /* move.b */
1277
        opsize = OS_BYTE;
1278
        break;
1279
    case 2: /* move.l */
1280
        opsize = OS_LONG;
1281
        break;
1282
    case 3: /* move.w */
1283
        opsize = OS_WORD;
1284
        break;
1285
    default:
1286
        abort();
1287
    }
1288
    SRC_EA(src, opsize, 1, NULL);
1289
    op = (insn >> 6) & 7;
1290
    if (op == 1) {
1291
        /* movea */
1292
        /* The value will already have been sign extended.  */
1293
        dest = AREG(insn, 9);
1294
        tcg_gen_mov_i32(dest, src);
1295
    } else {
1296
        /* normal move */
1297
        uint16_t dest_ea;
1298
        dest_ea = ((insn >> 9) & 7) | (op << 3);
1299
        DEST_EA(dest_ea, opsize, src, NULL);
1300
        /* This will be correct because loads sign extend.  */
1301
        gen_logic_cc(s, src);
1302
    }
1303
}
1304

    
1305
DISAS_INSN(negx)
1306
{
1307
    TCGv reg;
1308

    
1309
    gen_flush_flags(s);
1310
    reg = DREG(insn, 0);
1311
    gen_helper_subx_cc(reg, cpu_env, tcg_const_i32(0), reg);
1312
}
1313

    
1314
DISAS_INSN(lea)
1315
{
1316
    TCGv reg;
1317
    TCGv tmp;
1318

    
1319
    reg = AREG(insn, 9);
1320
    tmp = gen_lea(s, insn, OS_LONG);
1321
    if (IS_NULL_QREG(tmp)) {
1322
        gen_addr_fault(s);
1323
        return;
1324
    }
1325
    tcg_gen_mov_i32(reg, tmp);
1326
}
1327

    
1328
DISAS_INSN(clr)
1329
{
1330
    int opsize;
1331

    
1332
    switch ((insn >> 6) & 3) {
1333
    case 0: /* clr.b */
1334
        opsize = OS_BYTE;
1335
        break;
1336
    case 1: /* clr.w */
1337
        opsize = OS_WORD;
1338
        break;
1339
    case 2: /* clr.l */
1340
        opsize = OS_LONG;
1341
        break;
1342
    default:
1343
        abort();
1344
    }
1345
    DEST_EA(insn, opsize, gen_im32(0), NULL);
1346
    gen_logic_cc(s, gen_im32(0));
1347
}
1348

    
1349
static TCGv gen_get_ccr(DisasContext *s)
1350
{
1351
    TCGv dest;
1352

    
1353
    gen_flush_flags(s);
1354
    dest = gen_new_qreg(QMODE_I32);
1355
    tcg_gen_shli_i32(dest, QREG_CC_X, 4);
1356
    tcg_gen_or_i32(dest, dest, QREG_CC_DEST);
1357
    return dest;
1358
}
1359

    
1360
DISAS_INSN(move_from_ccr)
1361
{
1362
    TCGv reg;
1363
    TCGv ccr;
1364

    
1365
    ccr = gen_get_ccr(s);
1366
    reg = DREG(insn, 0);
1367
    gen_partset_reg(OS_WORD, reg, ccr);
1368
}
1369

    
1370
DISAS_INSN(neg)
1371
{
1372
    TCGv reg;
1373
    TCGv src1;
1374

    
1375
    reg = DREG(insn, 0);
1376
    src1 = gen_new_qreg(QMODE_I32);
1377
    tcg_gen_mov_i32(src1, reg);
1378
    tcg_gen_neg_i32(reg, src1);
1379
    s->cc_op = CC_OP_SUB;
1380
    gen_update_cc_add(reg, src1);
1381
    gen_helper_xflag_lt(QREG_CC_X, tcg_const_i32(0), src1);
1382
    s->cc_op = CC_OP_SUB;
1383
}
1384

    
1385
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
1386
{
1387
    tcg_gen_movi_i32(QREG_CC_DEST, val & 0xf);
1388
    tcg_gen_movi_i32(QREG_CC_X, (val & 0x10) >> 4);
1389
    if (!ccr_only) {
1390
        gen_helper_set_sr(cpu_env, tcg_const_i32(val & 0xff00));
1391
    }
1392
}
1393

    
1394
static void gen_set_sr(DisasContext *s, uint16_t insn, int ccr_only)
1395
{
1396
    TCGv tmp;
1397
    TCGv reg;
1398

    
1399
    s->cc_op = CC_OP_FLAGS;
1400
    if ((insn & 0x38) == 0)
1401
      {
1402
        tmp = gen_new_qreg(QMODE_I32);
1403
        reg = DREG(insn, 0);
1404
        tcg_gen_andi_i32(QREG_CC_DEST, reg, 0xf);
1405
        tcg_gen_shri_i32(tmp, reg, 4);
1406
        tcg_gen_andi_i32(QREG_CC_X, tmp, 1);
1407
        if (!ccr_only) {
1408
            gen_helper_set_sr(cpu_env, reg);
1409
        }
1410
      }
1411
    else if ((insn & 0x3f) == 0x3c)
1412
      {
1413
        uint16_t val;
1414
        val = lduw_code(s->pc);
1415
        s->pc += 2;
1416
        gen_set_sr_im(s, val, ccr_only);
1417
      }
1418
    else
1419
        disas_undef(s, insn);
1420
}
1421

    
1422
DISAS_INSN(move_to_ccr)
1423
{
1424
    gen_set_sr(s, insn, 1);
1425
}
1426

    
1427
DISAS_INSN(not)
1428
{
1429
    TCGv reg;
1430

    
1431
    reg = DREG(insn, 0);
1432
    tcg_gen_not_i32(reg, reg);
1433
    gen_logic_cc(s, reg);
1434
}
1435

    
1436
DISAS_INSN(swap)
1437
{
1438
    TCGv src1;
1439
    TCGv src2;
1440
    TCGv reg;
1441

    
1442
    src1 = gen_new_qreg(QMODE_I32);
1443
    src2 = gen_new_qreg(QMODE_I32);
1444
    reg = DREG(insn, 0);
1445
    tcg_gen_shli_i32(src1, reg, 16);
1446
    tcg_gen_shri_i32(src2, reg, 16);
1447
    tcg_gen_or_i32(reg, src1, src2);
1448
    gen_logic_cc(s, reg);
1449
}
1450

    
1451
DISAS_INSN(pea)
1452
{
1453
    TCGv tmp;
1454

    
1455
    tmp = gen_lea(s, insn, OS_LONG);
1456
    if (IS_NULL_QREG(tmp)) {
1457
        gen_addr_fault(s);
1458
        return;
1459
    }
1460
    gen_push(s, tmp);
1461
}
1462

    
1463
DISAS_INSN(ext)
1464
{
1465
    int op;
1466
    TCGv reg;
1467
    TCGv tmp;
1468

    
1469
    reg = DREG(insn, 0);
1470
    op = (insn >> 6) & 7;
1471
    tmp = gen_new_qreg(QMODE_I32);
1472
    if (op == 3)
1473
        tcg_gen_ext16s_i32(tmp, reg);
1474
    else
1475
        tcg_gen_ext8s_i32(tmp, reg);
1476
    if (op == 2)
1477
        gen_partset_reg(OS_WORD, reg, tmp);
1478
    else
1479
        tcg_gen_mov_i32(reg, tmp);
1480
    gen_logic_cc(s, tmp);
1481
}
1482

    
1483
DISAS_INSN(tst)
1484
{
1485
    int opsize;
1486
    TCGv tmp;
1487

    
1488
    switch ((insn >> 6) & 3) {
1489
    case 0: /* tst.b */
1490
        opsize = OS_BYTE;
1491
        break;
1492
    case 1: /* tst.w */
1493
        opsize = OS_WORD;
1494
        break;
1495
    case 2: /* tst.l */
1496
        opsize = OS_LONG;
1497
        break;
1498
    default:
1499
        abort();
1500
    }
1501
    SRC_EA(tmp, opsize, 1, NULL);
1502
    gen_logic_cc(s, tmp);
1503
}
1504

    
1505
DISAS_INSN(pulse)
1506
{
1507
  /* Implemented as a NOP.  */
1508
}
1509

    
1510
DISAS_INSN(illegal)
1511
{
1512
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1513
}
1514

    
1515
/* ??? This should be atomic.  */
1516
DISAS_INSN(tas)
1517
{
1518
    TCGv dest;
1519
    TCGv src1;
1520
    TCGv addr;
1521

    
1522
    dest = gen_new_qreg(QMODE_I32);
1523
    SRC_EA(src1, OS_BYTE, 1, &addr);
1524
    gen_logic_cc(s, src1);
1525
    tcg_gen_ori_i32(dest, src1, 0x80);
1526
    DEST_EA(insn, OS_BYTE, dest, &addr);
1527
}
1528

    
1529
DISAS_INSN(mull)
1530
{
1531
    uint16_t ext;
1532
    TCGv reg;
1533
    TCGv src1;
1534
    TCGv dest;
1535

    
1536
    /* The upper 32 bits of the product are discarded, so
1537
       muls.l and mulu.l are functionally equivalent.  */
1538
    ext = lduw_code(s->pc);
1539
    s->pc += 2;
1540
    if (ext & 0x87ff) {
1541
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1542
        return;
1543
    }
1544
    reg = DREG(ext, 12);
1545
    SRC_EA(src1, OS_LONG, 0, NULL);
1546
    dest = gen_new_qreg(QMODE_I32);
1547
    tcg_gen_mul_i32(dest, src1, reg);
1548
    tcg_gen_mov_i32(reg, dest);
1549
    /* Unlike m68k, coldfire always clears the overflow bit.  */
1550
    gen_logic_cc(s, dest);
1551
}
1552

    
1553
DISAS_INSN(link)
1554
{
1555
    int16_t offset;
1556
    TCGv reg;
1557
    TCGv tmp;
1558

    
1559
    offset = ldsw_code(s->pc);
1560
    s->pc += 2;
1561
    reg = AREG(insn, 0);
1562
    tmp = gen_new_qreg(QMODE_I32);
1563
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1564
    gen_store(s, OS_LONG, tmp, reg);
1565
    if ((insn & 7) != 7)
1566
        tcg_gen_mov_i32(reg, tmp);
1567
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
1568
}
1569

    
1570
DISAS_INSN(unlk)
1571
{
1572
    TCGv src;
1573
    TCGv reg;
1574
    TCGv tmp;
1575

    
1576
    src = gen_new_qreg(QMODE_I32);
1577
    reg = AREG(insn, 0);
1578
    tcg_gen_mov_i32(src, reg);
1579
    tmp = gen_load(s, OS_LONG, src, 0);
1580
    tcg_gen_mov_i32(reg, tmp);
1581
    tcg_gen_addi_i32(QREG_SP, src, 4);
1582
}
1583

    
1584
DISAS_INSN(nop)
1585
{
1586
}
1587

    
1588
DISAS_INSN(rts)
1589
{
1590
    TCGv tmp;
1591

    
1592
    tmp = gen_load(s, OS_LONG, QREG_SP, 0);
1593
    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
1594
    gen_jmp(s, tmp);
1595
}
1596

    
1597
DISAS_INSN(jump)
1598
{
1599
    TCGv tmp;
1600

    
1601
    /* Load the target address first to ensure correct exception
1602
       behavior.  */
1603
    tmp = gen_lea(s, insn, OS_LONG);
1604
    if (IS_NULL_QREG(tmp)) {
1605
        gen_addr_fault(s);
1606
        return;
1607
    }
1608
    if ((insn & 0x40) == 0) {
1609
        /* jsr */
1610
        gen_push(s, gen_im32(s->pc));
1611
    }
1612
    gen_jmp(s, tmp);
1613
}
1614

    
1615
DISAS_INSN(addsubq)
1616
{
1617
    TCGv src1;
1618
    TCGv src2;
1619
    TCGv dest;
1620
    int val;
1621
    TCGv addr;
1622

    
1623
    SRC_EA(src1, OS_LONG, 0, &addr);
1624
    val = (insn >> 9) & 7;
1625
    if (val == 0)
1626
        val = 8;
1627
    dest = gen_new_qreg(QMODE_I32);
1628
    tcg_gen_mov_i32(dest, src1);
1629
    if ((insn & 0x38) == 0x08) {
1630
        /* Don't update condition codes if the destination is an
1631
           address register.  */
1632
        if (insn & 0x0100) {
1633
            tcg_gen_subi_i32(dest, dest, val);
1634
        } else {
1635
            tcg_gen_addi_i32(dest, dest, val);
1636
        }
1637
    } else {
1638
        src2 = gen_im32(val);
1639
        if (insn & 0x0100) {
1640
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1641
            tcg_gen_subi_i32(dest, dest, val);
1642
            s->cc_op = CC_OP_SUB;
1643
        } else {
1644
            tcg_gen_addi_i32(dest, dest, val);
1645
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1646
            s->cc_op = CC_OP_ADD;
1647
        }
1648
        gen_update_cc_add(dest, src2);
1649
    }
1650
    DEST_EA(insn, OS_LONG, dest, &addr);
1651
}
1652

    
1653
DISAS_INSN(tpf)
1654
{
1655
    switch (insn & 7) {
1656
    case 2: /* One extension word.  */
1657
        s->pc += 2;
1658
        break;
1659
    case 3: /* Two extension words.  */
1660
        s->pc += 4;
1661
        break;
1662
    case 4: /* No extension words.  */
1663
        break;
1664
    default:
1665
        disas_undef(s, insn);
1666
    }
1667
}
1668

    
1669
DISAS_INSN(branch)
1670
{
1671
    int32_t offset;
1672
    uint32_t base;
1673
    int op;
1674
    int l1;
1675

    
1676
    base = s->pc;
1677
    op = (insn >> 8) & 0xf;
1678
    offset = (int8_t)insn;
1679
    if (offset == 0) {
1680
        offset = ldsw_code(s->pc);
1681
        s->pc += 2;
1682
    } else if (offset == -1) {
1683
        offset = read_im32(s);
1684
    }
1685
    if (op == 1) {
1686
        /* bsr */
1687
        gen_push(s, gen_im32(s->pc));
1688
    }
1689
    gen_flush_cc_op(s);
1690
    if (op > 1) {
1691
        /* Bcc */
1692
        l1 = gen_new_label();
1693
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1694
        gen_jmp_tb(s, 1, base + offset);
1695
        gen_set_label(l1);
1696
        gen_jmp_tb(s, 0, s->pc);
1697
    } else {
1698
        /* Unconditional branch.  */
1699
        gen_jmp_tb(s, 0, base + offset);
1700
    }
1701
}
1702

    
1703
DISAS_INSN(moveq)
1704
{
1705
    uint32_t val;
1706

    
1707
    val = (int8_t)insn;
1708
    tcg_gen_movi_i32(DREG(insn, 9), val);
1709
    gen_logic_cc(s, tcg_const_i32(val));
1710
}
1711

    
1712
DISAS_INSN(mvzs)
1713
{
1714
    int opsize;
1715
    TCGv src;
1716
    TCGv reg;
1717

    
1718
    if (insn & 0x40)
1719
        opsize = OS_WORD;
1720
    else
1721
        opsize = OS_BYTE;
1722
    SRC_EA(src, opsize, (insn & 0x80) != 0, NULL);
1723
    reg = DREG(insn, 9);
1724
    tcg_gen_mov_i32(reg, src);
1725
    gen_logic_cc(s, src);
1726
}
1727

    
1728
DISAS_INSN(or)
1729
{
1730
    TCGv reg;
1731
    TCGv dest;
1732
    TCGv src;
1733
    TCGv addr;
1734

    
1735
    reg = DREG(insn, 9);
1736
    dest = gen_new_qreg(QMODE_I32);
1737
    if (insn & 0x100) {
1738
        SRC_EA(src, OS_LONG, 0, &addr);
1739
        tcg_gen_or_i32(dest, src, reg);
1740
        DEST_EA(insn, OS_LONG, dest, &addr);
1741
    } else {
1742
        SRC_EA(src, OS_LONG, 0, NULL);
1743
        tcg_gen_or_i32(dest, src, reg);
1744
        tcg_gen_mov_i32(reg, dest);
1745
    }
1746
    gen_logic_cc(s, dest);
1747
}
1748

    
1749
DISAS_INSN(suba)
1750
{
1751
    TCGv src;
1752
    TCGv reg;
1753

    
1754
    SRC_EA(src, OS_LONG, 0, NULL);
1755
    reg = AREG(insn, 9);
1756
    tcg_gen_sub_i32(reg, reg, src);
1757
}
1758

    
1759
DISAS_INSN(subx)
1760
{
1761
    TCGv reg;
1762
    TCGv src;
1763

    
1764
    gen_flush_flags(s);
1765
    reg = DREG(insn, 9);
1766
    src = DREG(insn, 0);
1767
    gen_helper_subx_cc(reg, cpu_env, reg, src);
1768
}
1769

    
1770
DISAS_INSN(mov3q)
1771
{
1772
    TCGv src;
1773
    int val;
1774

    
1775
    val = (insn >> 9) & 7;
1776
    if (val == 0)
1777
        val = -1;
1778
    src = gen_im32(val);
1779
    gen_logic_cc(s, src);
1780
    DEST_EA(insn, OS_LONG, src, NULL);
1781
}
1782

    
1783
DISAS_INSN(cmp)
1784
{
1785
    int op;
1786
    TCGv src;
1787
    TCGv reg;
1788
    TCGv dest;
1789
    int opsize;
1790

    
1791
    op = (insn >> 6) & 3;
1792
    switch (op) {
1793
    case 0: /* cmp.b */
1794
        opsize = OS_BYTE;
1795
        s->cc_op = CC_OP_CMPB;
1796
        break;
1797
    case 1: /* cmp.w */
1798
        opsize = OS_WORD;
1799
        s->cc_op = CC_OP_CMPW;
1800
        break;
1801
    case 2: /* cmp.l */
1802
        opsize = OS_LONG;
1803
        s->cc_op = CC_OP_SUB;
1804
        break;
1805
    default:
1806
        abort();
1807
    }
1808
    SRC_EA(src, opsize, 1, NULL);
1809
    reg = DREG(insn, 9);
1810
    dest = gen_new_qreg(QMODE_I32);
1811
    tcg_gen_sub_i32(dest, reg, src);
1812
    gen_update_cc_add(dest, src);
1813
}
1814

    
1815
DISAS_INSN(cmpa)
1816
{
1817
    int opsize;
1818
    TCGv src;
1819
    TCGv reg;
1820
    TCGv dest;
1821

    
1822
    if (insn & 0x100) {
1823
        opsize = OS_LONG;
1824
    } else {
1825
        opsize = OS_WORD;
1826
    }
1827
    SRC_EA(src, opsize, 1, NULL);
1828
    reg = AREG(insn, 9);
1829
    dest = gen_new_qreg(QMODE_I32);
1830
    tcg_gen_sub_i32(dest, reg, src);
1831
    gen_update_cc_add(dest, src);
1832
    s->cc_op = CC_OP_SUB;
1833
}
1834

    
1835
DISAS_INSN(eor)
1836
{
1837
    TCGv src;
1838
    TCGv reg;
1839
    TCGv dest;
1840
    TCGv addr;
1841

    
1842
    SRC_EA(src, OS_LONG, 0, &addr);
1843
    reg = DREG(insn, 9);
1844
    dest = gen_new_qreg(QMODE_I32);
1845
    tcg_gen_xor_i32(dest, src, reg);
1846
    gen_logic_cc(s, dest);
1847
    DEST_EA(insn, OS_LONG, dest, &addr);
1848
}
1849

    
1850
DISAS_INSN(and)
1851
{
1852
    TCGv src;
1853
    TCGv reg;
1854
    TCGv dest;
1855
    TCGv addr;
1856

    
1857
    reg = DREG(insn, 9);
1858
    dest = gen_new_qreg(QMODE_I32);
1859
    if (insn & 0x100) {
1860
        SRC_EA(src, OS_LONG, 0, &addr);
1861
        tcg_gen_and_i32(dest, src, reg);
1862
        DEST_EA(insn, OS_LONG, dest, &addr);
1863
    } else {
1864
        SRC_EA(src, OS_LONG, 0, NULL);
1865
        tcg_gen_and_i32(dest, src, reg);
1866
        tcg_gen_mov_i32(reg, dest);
1867
    }
1868
    gen_logic_cc(s, dest);
1869
}
1870

    
1871
DISAS_INSN(adda)
1872
{
1873
    TCGv src;
1874
    TCGv reg;
1875

    
1876
    SRC_EA(src, OS_LONG, 0, NULL);
1877
    reg = AREG(insn, 9);
1878
    tcg_gen_add_i32(reg, reg, src);
1879
}
1880

    
1881
DISAS_INSN(addx)
1882
{
1883
    TCGv reg;
1884
    TCGv src;
1885

    
1886
    gen_flush_flags(s);
1887
    reg = DREG(insn, 9);
1888
    src = DREG(insn, 0);
1889
    gen_helper_addx_cc(reg, cpu_env, reg, src);
1890
    s->cc_op = CC_OP_FLAGS;
1891
}
1892

    
1893
/* TODO: This could be implemented without helper functions.  */
1894
DISAS_INSN(shift_im)
1895
{
1896
    TCGv reg;
1897
    int tmp;
1898
    TCGv shift;
1899

    
1900
    reg = DREG(insn, 0);
1901
    tmp = (insn >> 9) & 7;
1902
    if (tmp == 0)
1903
        tmp = 8;
1904
    shift = gen_im32(tmp);
1905
    /* No need to flush flags because we know we will set the C flag.  */
1906
    if (insn & 0x100) {
1907
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
1908
    } else {
1909
        if (insn & 8) {
1910
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
1911
        } else {
1912
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
1913
        }
1914
    }
1915
    s->cc_op = CC_OP_SHIFT;
1916
}
1917

    
1918
DISAS_INSN(shift_reg)
1919
{
1920
    TCGv reg;
1921
    TCGv shift;
1922

    
1923
    reg = DREG(insn, 0);
1924
    shift = DREG(insn, 9);
1925
    /* Shift by zero leaves C flag unmodified.   */
1926
    gen_flush_flags(s);
1927
    if (insn & 0x100) {
1928
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
1929
    } else {
1930
        if (insn & 8) {
1931
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
1932
        } else {
1933
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
1934
        }
1935
    }
1936
    s->cc_op = CC_OP_SHIFT;
1937
}
1938

    
1939
DISAS_INSN(ff1)
1940
{
1941
    TCGv reg;
1942
    reg = DREG(insn, 0);
1943
    gen_logic_cc(s, reg);
1944
    gen_helper_ff1(reg, reg);
1945
}
1946

    
1947
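/* Assemble the full status register value: the upper bits are kept in
   QREG_SR, while the condition-code byte is obtained from gen_get_ccr().  */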
static TCGv gen_get_sr(DisasContext *s)
{
    TCGv ccr;
    TCGv sr;

    ccr = gen_get_ccr(s);
    sr = gen_new_qreg(QMODE_I32);
    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
    tcg_gen_or_i32(sr, sr, ccr);
    return sr;
}

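/* strldsr: push the current SR, then load SR from the immediate of the
   following move-to-SR instruction (the second word must be 0x46FC).  */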
DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    addr = s->pc - 2;
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (ext != 0x46FC) {
        gen_exception(s, addr, EXCP_UNSUPPORTED);
        return;
    }
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
}

DISAS_INSN(move_from_sr)
{
    TCGv reg;
    TCGv sr;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    sr = gen_get_sr(s);
    reg = DREG(insn, 0);
    gen_partset_reg(OS_WORD, reg, sr);
}

DISAS_INSN(move_to_sr)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_set_sr(s, insn, 0);
    gen_lookup_tb(s);
}

DISAS_INSN(move_from_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(move_to_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(halt)
{
    gen_exception(s, s->pc, EXCP_HALT_INSN);
}

DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(QREG_HALTED, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}

DISAS_INSN(rte)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_exception(s, s->pc - 2, EXCP_RTE);
}

DISAS_INSN(movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    gen_lookup_tb(s);
}

DISAS_INSN(intouch)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* ICache fetch.  Implement as no-op.  */
}

DISAS_INSN(cpushl)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}

DISAS_INSN(wddata)
{
    gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
}

DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    qemu_assert(0, "WDEBUG not implemented");
}

DISAS_INSN(trap)
{
    gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
}

/* ??? FP exceptions are not implemented.  Most exceptions are deferred until
   immediately before the next FP instruction is executed.  */
DISAS_INSN(fpu)
{
    uint16_t ext;
    int opmode;
    TCGv src;
    TCGv dest;
    TCGv res;
    int round;
    int opsize;

    ext = lduw_code(s->pc);
    s->pc += 2;
    opmode = ext & 0x7f;
    switch ((ext >> 13) & 7) {
    case 0: case 2:
        break;
    case 1:
        goto undef;
    case 3: /* fmove out */
        src = FREG(ext, 7);
        /* fmove */
        /* ??? TODO: Proper behavior on overflow.  */
        switch ((ext >> 10) & 7) {
        case 0:
            opsize = OS_LONG;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        case 1:
            opsize = OS_SINGLE;
            res = gen_new_qreg(QMODE_F32);
            gen_helper_f64_to_f32(res, cpu_env, src);
            break;
        case 4:
            opsize = OS_WORD;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        case 5:
            opsize = OS_DOUBLE;
            res = src;
            break;
        case 6:
            opsize = OS_BYTE;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        default:
            goto undef;
        }
        DEST_EA(insn, opsize, res, NULL);
        return;
    case 4: /* fmove to control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Ignore writes.  */
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove to control %d",
                      (ext >> 10) & 7);
        }
        break;
    case 5: /* fmove from control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Always return zero.  */
            res = gen_im32(0);
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove from control %d",
                      (ext >> 10) & 7);
            goto undef;
        }
        DEST_EA(insn, OS_LONG, res, NULL);
        break;
    case 6: /* fmovem */
    case 7:
        {
            TCGv addr;
            uint16_t mask;
            int i;
            if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
                goto undef;
            src = gen_lea(s, insn, OS_LONG);
            if (IS_NULL_QREG(src)) {
                gen_addr_fault(s);
                return;
            }
            addr = gen_new_qreg(QMODE_I32);
            tcg_gen_mov_i32(addr, src);
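            /* The low byte of the extension word is the register mask
               (bit 7 selects F0 ... bit 0 selects F7); bit 13 selects
               store versus load.  */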
            mask = 0x80;
            for (i = 0; i < 8; i++) {
                if (ext & mask) {
                    s->is_mem = 1;
                    dest = FREG(i, 0);
                    if (ext & (1 << 13)) {
                        /* store */
                        tcg_gen_qemu_stf64(dest, addr, IS_USER(s));
                    } else {
                        /* load */
                        tcg_gen_qemu_ldf64(dest, addr, IS_USER(s));
                    }
                    if (ext & (mask - 1))
                        tcg_gen_addi_i32(addr, addr, 8);
                }
                mask >>= 1;
            }
        }
        return;
    }
    if (ext & (1 << 14)) {
        TCGv tmp;

        /* Source effective address.  */
        switch ((ext >> 10) & 7) {
        case 0: opsize = OS_LONG; break;
        case 1: opsize = OS_SINGLE; break;
        case 4: opsize = OS_WORD; break;
        case 5: opsize = OS_DOUBLE; break;
        case 6: opsize = OS_BYTE; break;
        default:
            goto undef;
        }
        SRC_EA(tmp, opsize, 1, NULL);
        if (opsize == OS_DOUBLE) {
            src = tmp;
        } else {
            src = gen_new_qreg(QMODE_F64);
            switch (opsize) {
            case OS_LONG:
            case OS_WORD:
            case OS_BYTE:
                gen_helper_i32_to_f64(src, cpu_env, tmp);
                break;
            case OS_SINGLE:
                gen_helper_f32_to_f64(src, cpu_env, tmp);
                break;
            }
        }
    } else {
        /* Source register.  */
        src = FREG(ext, 10);
    }
    dest = FREG(ext, 7);
    res = gen_new_qreg(QMODE_F64);
    if (opmode != 0x3a)
        tcg_gen_mov_f64(res, dest);
    round = 1;
    switch (opmode) {
    case 0: case 0x40: case 0x44: /* fmove */
        tcg_gen_mov_f64(res, src);
        break;
    case 1: /* fint */
        gen_helper_iround_f64(res, cpu_env, src);
        round = 0;
        break;
    case 3: /* fintrz */
        gen_helper_itrunc_f64(res, cpu_env, src);
        round = 0;
        break;
    case 4: case 0x41: case 0x45: /* fsqrt */
        gen_helper_sqrt_f64(res, cpu_env, src);
        break;
    case 0x18: case 0x58: case 0x5c: /* fabs */
        gen_helper_abs_f64(res, src);
        break;
    case 0x1a: case 0x5a: case 0x5e: /* fneg */
        gen_helper_chs_f64(res, src);
        break;
    case 0x20: case 0x60: case 0x64: /* fdiv */
        gen_helper_div_f64(res, cpu_env, res, src);
        break;
    case 0x22: case 0x62: case 0x66: /* fadd */
        gen_helper_add_f64(res, cpu_env, res, src);
        break;
    case 0x23: case 0x63: case 0x67: /* fmul */
        gen_helper_mul_f64(res, cpu_env, res, src);
        break;
    case 0x28: case 0x68: case 0x6c: /* fsub */
        gen_helper_sub_f64(res, cpu_env, res, src);
        break;
    case 0x38: /* fcmp */
        gen_helper_sub_cmp_f64(res, cpu_env, res, src);
        dest = NULL_QREG;
        round = 0;
        break;
    case 0x3a: /* ftst */
        tcg_gen_mov_f64(res, src);
        dest = NULL_QREG;
        round = 0;
        break;
    default:
        goto undef;
    }
    if (round) {
        if (opmode & 0x40) {
            if ((opmode & 0x4) != 0)
                round = 0;
        } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
            round = 0;
        }
    }
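    /* Approximate single-precision results by converting the value to
       float32 and back to float64.  */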
    if (round) {
        TCGv tmp;

        tmp = gen_new_qreg(QMODE_F32);
        gen_helper_f64_to_f32(tmp, cpu_env, res);
        gen_helper_f32_to_f64(res, cpu_env, tmp);
    }
    tcg_gen_mov_f64(QREG_FP_RESULT, res);
    if (!IS_NULL_QREG(dest)) {
        tcg_gen_mov_f64(dest, res);
    }
    return;
undef:
    s->pc -= 2;
    disas_undef_fpu(s, insn);
}

DISAS_INSN(fbcc)
{
    uint32_t offset;
    uint32_t addr;
    TCGv flag;
    int l1;

    addr = s->pc;
    offset = ldsw_code(s->pc);
    s->pc += 2;
    if (insn & (1 << 6)) {
        offset = (offset << 16) | lduw_code(s->pc);
        s->pc += 2;
    }

    l1 = gen_new_label();
    /* TODO: Raise BSUN exception.  */
    flag = gen_new_qreg(QMODE_I32);
    gen_helper_compare_f64(flag, cpu_env, QREG_FP_RESULT);
    /* Jump to l1 if condition is true.  */
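    /* The helper returns an integer comparison code for QREG_FP_RESULT;
       each predicate below branches to l1 when its condition holds (the
       expected code values are noted in the case comments).  */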
    switch (insn & 0xf) {
    case 0: /* f */
        break;
    case 1: /* eq (=0) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 2: /* ogt (=1) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(1), l1);
        break;
    case 3: /* oge (=0 or =1) */
        tcg_gen_brcond_i32(TCG_COND_LEU, flag, tcg_const_i32(1), l1);
        break;
    case 4: /* olt (=-1) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(0), l1);
        break;
    case 5: /* ole (=-1 or =0) */
        tcg_gen_brcond_i32(TCG_COND_LE, flag, tcg_const_i32(0), l1);
        break;
    case 6: /* ogl (=-1 or =1) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 7: /* or (=2) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(2), l1);
        break;
    case 8: /* un (<2) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(2), l1);
        break;
    case 9: /* ueq (=0 or =2) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 10: /* ugt (>0) */
        tcg_gen_brcond_i32(TCG_COND_GT, flag, tcg_const_i32(0), l1);
        break;
    case 11: /* uge (>=0) */
        tcg_gen_brcond_i32(TCG_COND_GE, flag, tcg_const_i32(0), l1);
        break;
    case 12: /* ult (=-1 or =2) */
        tcg_gen_brcond_i32(TCG_COND_GEU, flag, tcg_const_i32(2), l1);
        break;
    case 13: /* ule (!=1) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(1), l1);
        break;
    case 14: /* ne (!=0) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 15: /* t */
        tcg_gen_br(l1);
        break;
    }
    gen_jmp_tb(s, 0, s->pc);
    gen_set_label(l1);
    gen_jmp_tb(s, 1, addr + offset);
}

DISAS_INSN(frestore)
{
    /* TODO: Implement frestore.  */
    qemu_assert(0, "FRESTORE not implemented");
}

DISAS_INSN(fsave)
{
    /* TODO: Implement fsave.  */
    qemu_assert(0, "FSAVE not implemented");
}

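/* Extract the upper or lower 16-bit MAC operand from VAL: when MACSR_FI is
   set the operand is kept in the high half of the word, when MACSR_SU is set
   it is sign-extended, otherwise it is zero-extended.  */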
static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
{
    TCGv tmp = gen_new_qreg(QMODE_I32);
    if (s->env->macsr & MACSR_FI) {
        if (upper)
            tcg_gen_andi_i32(tmp, val, 0xffff0000);
        else
            tcg_gen_shli_i32(tmp, val, 16);
    } else if (s->env->macsr & MACSR_SU) {
        if (upper)
            tcg_gen_sari_i32(tmp, val, 16);
        else
            tcg_gen_ext16s_i32(tmp, val);
    } else {
        if (upper)
            tcg_gen_shri_i32(tmp, val, 16);
        else
            tcg_gen_ext16u_i32(tmp, val);
    }
    return tmp;
}

static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}

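/* ColdFire MAC/EMAC multiply-accumulate.  The instruction may be combined
   with a memory load, and on parts with the CF_EMAC_B feature the same
   multiply may feed a second (dual) accumulate.  */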
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    if (IS_NULL_QREG(s->mactmp))
        s->mactmp = tcg_temp_new(TCG_TYPE_I64);

    ext = lduw_code(s->pc);
    s->pc += 2;

    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(s, insn, OS_LONG);
        addr = gen_new_qreg(QMODE_I32);
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /* Load the value now to ensure correct exception behavior.
           Perform writeback after reading the MAC inputs.  */
        loadval = gen_load(s, OS_LONG, addr, 0);

        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = gen_new_qreg(QMODE_I32);
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = gen_new_qreg(QMODE_I32);
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = gen_new_qreg(QMODE_I32);
        gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = gen_new_qreg(QMODE_I32);
            gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /* FIXME: Should address writeback happen with the masked or
           unmasked value?  */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}

DISAS_INSN(from_mac)
{
    TCGv rx;
    TCGv acc;
    int accnum;

    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    if (s->env->macsr & MACSR_FI) {
        gen_helper_get_macf(cpu_env, rx, acc);
    } else if ((s->env->macsr & MACSR_OMC) == 0) {
        tcg_gen_trunc_i64_i32(rx, acc);
    } else if (s->env->macsr & MACSR_SU) {
        gen_helper_get_macs(rx, acc);
    } else {
        gen_helper_get_macu(rx, acc);
    }
    if (insn & 0x40) {
        tcg_gen_movi_i64(acc, 0);
        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    }
}

DISAS_INSN(move_mac)
{
    /* FIXME: This can be done without a helper.  */
    int src;
    TCGv dest;
    src = insn & 3;
    dest = tcg_const_i32((insn >> 9) & 3);
    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, dest);
}

DISAS_INSN(from_macsr)
{
    TCGv reg;

    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MACSR);
}

DISAS_INSN(from_mask)
{
    TCGv reg;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
}

DISAS_INSN(from_mext)
{
    TCGv reg;
    TCGv acc;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_get_mac_extf(reg, cpu_env, acc);
    else
        gen_helper_get_mac_exti(reg, cpu_env, acc);
}

DISAS_INSN(macsr_to_ccr)
{
    tcg_gen_movi_i32(QREG_CC_X, 0);
    tcg_gen_andi_i32(QREG_CC_DEST, QREG_MACSR, 0xf);
    s->cc_op = CC_OP_FLAGS;
}

DISAS_INSN(to_mac)
{
    TCGv acc;
    TCGv val;
    int accnum;
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    SRC_EA(val, OS_LONG, 0, NULL);
    if (s->env->macsr & MACSR_FI) {
        tcg_gen_ext_i32_i64(acc, val);
        tcg_gen_shli_i64(acc, acc, 8);
    } else if (s->env->macsr & MACSR_SU) {
        tcg_gen_ext_i32_i64(acc, val);
    } else {
        tcg_gen_extu_i32_i64(acc, val);
    }
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
}

DISAS_INSN(to_macsr)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    gen_helper_set_macsr(cpu_env, val);
    gen_lookup_tb(s);
}

DISAS_INSN(to_mask)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
}

DISAS_INSN(to_mext)
{
    TCGv val;
    TCGv acc;
    SRC_EA(val, OS_LONG, 0, NULL);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_set_mac_extf(cpu_env, val, acc);
    else if (s->env->macsr & MACSR_SU)
        gen_helper_set_mac_exts(cpu_env, val, acc);
    else
        gen_helper_set_mac_extu(cpu_env, val, acc);
}

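/* One entry per 16-bit opcode, filled in by register_opcode() below.  */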
static disas_proc opcode_table[65536];

static void
register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
{
  int i;
  int from;
  int to;

  /* Sanity check.  All set bits must be included in the mask.  */
  if (opcode & ~mask) {
      fprintf(stderr,
              "qemu internal error: bogus opcode definition %04x/%04x\n",
              opcode, mask);
      abort();
  }
  /* This could probably be cleverer.  For now just optimize the case where
     the top bits are known.  */
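  /* For example, registering opcode 0x51f8 with mask 0xfff8 enumerates the
     three don't-care low bits and fills the eight table slots
     0x51f8...0x51ff.  */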
  /* Find the first zero bit in the mask.  */
  i = 0x8000;
  while ((i & mask) != 0)
      i >>= 1;
  /* Iterate over all combinations of this and lower bits.  */
  if (i == 0)
      i = 1;
  else
      i <<= 1;
  from = opcode & ~(i - 1);
  to = from + i;
  for (i = from; i < to; i++) {
      if ((i & mask) == opcode)
          opcode_table[i] = proc;
  }
}

/* Register m68k opcode handlers.  Order is important.
   Later insns override earlier ones.  */
void register_m68k_insns (CPUM68KState *env)
{
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        register_opcode(disas_##name, 0x##opcode, 0x##mask); \
    } while(0)
    INSN(undef,     0000, 0000, CF_ISA_A);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
    INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(bitop_im,  0800, ffc0, CF_ISA_A);
    INSN(bitop_im,  0840, ffc0, CF_ISA_A);
    INSN(bitop_im,  0880, ffc0, CF_ISA_A);
    INSN(bitop_im,  08c0, ffc0, CF_ISA_A);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(move,      1000, f000, CF_ISA_A);
    INSN(move,      2000, f000, CF_ISA_A);
    INSN(move,      3000, f000, CF_ISA_A);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(lea,       41c0, f1c0, CF_ISA_A);
    INSN(clr,       4200, ff00, CF_ISA_A);
    INSN(undef,     42c0, ffc0, CF_ISA_A);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
    INSN(pea,       4840, ffc0, CF_ISA_A);
    INSN(swap,      4840, fff8, CF_ISA_A);
    INSN(movem,     48c0, fbc0, CF_ISA_A);
    INSN(ext,       4880, fff8, CF_ISA_A);
    INSN(ext,       48c0, fff8, CF_ISA_A);
    INSN(ext,       49c0, fff8, CF_ISA_A);
    INSN(tst,       4a00, ff00, CF_ISA_A);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(halt,      4ac8, ffff, CF_ISA_A);
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    INSN(illegal,   4afc, ffff, CF_ISA_A);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    INSN(trap,      4e40, fff0, CF_ISA_A);
    INSN(link,      4e50, fff8, CF_ISA_A);
    INSN(unlk,      4e58, fff8, CF_ISA_A);
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(nop,       4e71, ffff, CF_ISA_A);
    INSN(stop,      4e72, ffff, CF_ISA_A);
    INSN(rte,       4e73, ffff, CF_ISA_A);
    INSN(rts,       4e75, ffff, CF_ISA_A);
    INSN(movec,     4e7b, ffff, CF_ISA_A);
    INSN(jump,      4e80, ffc0, CF_ISA_A);
    INSN(jump,      4ec0, ffc0, CF_ISA_A);
    INSN(addsubq,   5180, f1c0, CF_ISA_A);
    INSN(scc,       50c0, f0f8, CF_ISA_A);
    INSN(addsubq,   5080, f1c0, CF_ISA_A);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    INSN(branch,    6000, f000, CF_ISA_A);
    /* Disable long branch instructions, then add back the ones we want.  */
    INSN(undef,     60ff, f0ff, CF_ISA_A); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);

    INSN(moveq,     7000, f100, CF_ISA_A);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    INSN(or,        8000, f000, CF_ISA_A);
    INSN(divw,      80c0, f0c0, CF_ISA_A);
    INSN(addsub,    9000, f000, CF_ISA_A);
    INSN(subx,      9180, f1f8, CF_ISA_A);
    INSN(suba,      91c0, f1c0, CF_ISA_A);

    INSN(undef_mac, a000, f000, CF_ISA_A);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    INSN(and,       c000, f000, CF_ISA_A);
    INSN(mulw,      c0c0, f0c0, CF_ISA_A);
    INSN(addsub,    d000, f000, CF_ISA_A);
    INSN(addx,      d180, f1f8, CF_ISA_A);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(undef_fpu, f000, f000, CF_ISA_A);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f340, ffc0, CF_FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#undef INSN
}

/* ??? Some of this implementation is not exception safe.  We should always
   write back the result to memory before setting the condition codes.  */
static void disas_m68k_insn(CPUState * env, DisasContext *s)
{
    uint16_t insn;

    insn = lduw_code(s->pc);
    s->pc += 2;

    opcode_table[insn](s, insn);
}

/* generate intermediate code for basic block 'tb'.  */
static inline int
gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
                               int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    uint16_t *gen_opc_end;
    int j, lj;
    target_ulong pc_start;
    int pc_offset;
    int last_cc_op;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;

    dc->tb = tb;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->env = env;
    dc->is_jmp = DISAS_NEXT;
    dc->pc = pc_start;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->fpcr = env->fpcr;
    dc->user = (env->sr & SR_S) == 0;
    dc->is_mem = 0;
    dc->mactmp = NULL_QREG;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
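    /* Translate one instruction at a time until we hit a jump, fill the
       opcode buffer, approach the end of the guest page, or reach the
       requested instruction count; breakpoints and (with watchpoints
       present) memory ops also end the block early.  */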
    do {
        pc_offset = dc->pc - pc_start;
        gen_throws_exception = NULL;
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    gen_exception(dc, dc->pc, EXCP_DEBUG);
                    dc->is_jmp = DISAS_JUMP;
                    break;
                }
            }
            if (dc->is_jmp)
                break;
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = dc->pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_cc_op = dc->cc_op;
        dc->insn_pc = dc->pc;
        disas_m68k_insn(env, dc);
        num_insns++;

        /* Terminate the TB on memory ops if watchpoints are present.  */
        /* FIXME: This should be replaced by the deterministic execution
         * IRQ raising bits.  */
        if (dc->is_mem && env->nb_watchpoints)
            break;
    } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
             !env->singlestep_enabled &&
             (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (__builtin_expect(env->singlestep_enabled, 0)) {
        /* Make sure the pc is updated, and raise a debug exception.  */
        if (!dc->is_jmp) {
            gen_flush_cc_op(dc);
            tcg_gen_movi_i32(QREG_PC, dc->pc);
        }
        gen_helper_raise_exception(tcg_const_i32(EXCP_DEBUG));
    } else {
        switch(dc->is_jmp) {
        case DISAS_NEXT:
            gen_flush_cc_op(dc);
            gen_jmp_tb(dc, 0, dc->pc);
            break;
        default:
        case DISAS_JUMP:
        case DISAS_UPDATE:
            gen_flush_cc_op(dc);
            /* indicate that the hash table must be used to find the next TB */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* nothing more to generate */
            break;
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, dc->pc - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = dc->pc - pc_start;
        tb->icount = num_insns;
    }

    //optimize_flags();
    //expand_target_qops();
    return 0;
}

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}

void cpu_dump_state(CPUState *env, FILE *f,
                    int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                    int flags)
{
    int i;
    uint16_t sr;
    CPU_DoubleU u;
    for (i = 0; i < 8; i++)
      {
        u.d = env->fregs[i];
        cpu_fprintf (f, "D%d = %08x   A%d = %08x   F%d = %08x%08x (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, u.l.upper, u.l.lower, *(double *)&u.d);
      }
    cpu_fprintf (f, "PC = %08x   ", env->pc);
    sr = env->sr;
    cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
    cpu_fprintf (f, "FPRESULT = %12g\n", *(double *)&env->fp_result);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}