/*
 *  m68k translation
 *
 *  Copyright (c) 2005-2007 CodeSourcery
 *  Written by Paul Brook
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <assert.h>

#include "config.h"
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helpers.h"

//#define DEBUG_DISPATCH 1

#define DEFO32(name, offset) static TCGv QREG_##name;
#define DEFO64(name, offset) static TCGv QREG_##name;
#define DEFF64(name, offset) static TCGv QREG_##name;
#include "qregs.def"
#undef DEFO32
#undef DEFO64
#undef DEFF64
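
/* qregs.def itself is not shown here; judging from the references below it
   lists the per-CPU TCG globals as DEFO32/DEFO64 entries, so that with the
   definitions above an entry such as

       DEFO32(PC, pc)

   expands to "static TCGv QREG_PC;".  The entry names assumed from their
   uses in this file include QREG_PC, QREG_SR, QREG_CC_OP, QREG_CC_DEST,
   QREG_CC_SRC, QREG_CC_X, QREG_DIV1, QREG_DIV2 and QREG_HALTED.  */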

static TCGv cpu_env;

static char cpu_reg_names[3*8*3 + 5*4];
static TCGv cpu_dregs[8];
static TCGv cpu_aregs[8];
static TCGv cpu_fregs[8];
static TCGv cpu_macc[4];

#define DREG(insn, pos) cpu_dregs[((insn) >> (pos)) & 7]
#define AREG(insn, pos) cpu_aregs[((insn) >> (pos)) & 7]
#define FREG(insn, pos) cpu_fregs[((insn) >> (pos)) & 7]
#define MACREG(acc) cpu_macc[acc]
#define QREG_SP cpu_aregs[7]

static TCGv NULL_QREG;
#define IS_NULL_QREG(t) (GET_TCGV(t) == GET_TCGV(NULL_QREG))
/* Used to distinguish stores from bad addressing modes.  */
static TCGv store_dummy;
65

    
66
void m68k_tcg_init(void)
67
{
68
    char *p;
69
    int i;
70

    
71
#define DEFO32(name,  offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, offsetof(CPUState, offset), #name);
72
#define DEFO64(name,  offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0, offsetof(CPUState, offset), #name);
73
#define DEFF64(name,  offset) DEFO64(name, offset)
74
#include "qregs.def"
75
#undef DEFO32
76
#undef DEFO64
77
#undef DEFF64
78

    
79
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
80

    
81
    p = cpu_reg_names;
82
    for (i = 0; i < 8; i++) {
83
        sprintf(p, "D%d", i);
84
        cpu_dregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
85
                                          offsetof(CPUM68KState, dregs[i]), p);
86
        p += 3;
87
        sprintf(p, "A%d", i);
88
        cpu_aregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
89
                                          offsetof(CPUM68KState, aregs[i]), p);
90
        p += 3;
91
        sprintf(p, "F%d", i);
92
        cpu_fregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
93
                                          offsetof(CPUM68KState, fregs[i]), p);
94
        p += 3;
95
    }
96
    for (i = 0; i < 4; i++) {
97
        sprintf(p, "ACC%d", i);
98
        cpu_macc[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
99
                                         offsetof(CPUM68KState, macc[i]), p);
100
        p += 5;
101
    }
102

    
103
    NULL_QREG = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -4, "NULL");
104
    store_dummy = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -8, "NULL");
105

    
106
#define DEF_HELPER(name, ret, args) \
107
    tcg_register_helper(HELPER(name), #name);
108
#include "helpers.h"
109
}
110

    
111
static inline void qemu_assert(int cond, const char *msg)
112
{
113
    if (!cond) {
114
        fprintf (stderr, "badness: %s\n", msg);
115
        abort();
116
    }
117
}
118

    
119
/* internal defines */
120
typedef struct DisasContext {
121
    CPUM68KState *env;
122
    target_ulong insn_pc; /* Start of the current instruction.  */
123
    target_ulong pc;
124
    int is_jmp;
125
    int cc_op;
126
    int user;
127
    uint32_t fpcr;
128
    struct TranslationBlock *tb;
129
    int singlestep_enabled;
130
    int is_mem;
131
    TCGv mactmp;
132
} DisasContext;
133

    
134
#define DISAS_JUMP_NEXT 4
135

    
136
#if defined(CONFIG_USER_ONLY)
137
#define IS_USER(s) 1
138
#else
139
#define IS_USER(s) s->user
140
#endif
141

    
142
/* XXX: move that elsewhere */
143
/* ??? Fix exceptions.  */
144
static void *gen_throws_exception;
145
#define gen_last_qop NULL
146

    
147
extern FILE *logfile;
148
extern int loglevel;
149

    
150
#define OS_BYTE 0
151
#define OS_WORD 1
152
#define OS_LONG 2
153
#define OS_SINGLE 4
154
#define OS_DOUBLE 5
155

    
156
typedef void (*disas_proc)(DisasContext *, uint16_t);
157

    
158
#ifdef DEBUG_DISPATCH
159
#define DISAS_INSN(name) \
160
  static void real_disas_##name (DisasContext *s, uint16_t insn); \
161
  static void disas_##name (DisasContext *s, uint16_t insn) { \
162
    if (logfile) fprintf(logfile, "Dispatch " #name "\n"); \
163
    real_disas_##name(s, insn); } \
164
  static void real_disas_##name (DisasContext *s, uint16_t insn)
165
#else
166
#define DISAS_INSN(name) \
167
  static void disas_##name (DisasContext *s, uint16_t insn)
168
#endif
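
/* Illustrative expansion (not part of the build): with DEBUG_DISPATCH
   undefined, DISAS_INSN(nop) produces

       static void disas_nop (DisasContext *s, uint16_t insn)

   and the handler body follows as the function body.  With DEBUG_DISPATCH
   defined, a wrapper of the same name logs "Dispatch nop" to logfile and
   then calls real_disas_nop, which holds the real body.  */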
169

    
170
/* FIXME: Remove this.  */
171
#define gen_im32(val) tcg_const_i32(val)
172

    
173
/* Fake floating point.  */
174
#define TCG_TYPE_F32 TCG_TYPE_I32
175
#define TCG_TYPE_F64 TCG_TYPE_I64
176
#define tcg_gen_mov_f64 tcg_gen_mov_i64
177
#define tcg_gen_qemu_ldf32 tcg_gen_qemu_ld32u
178
#define tcg_gen_qemu_ldf64 tcg_gen_qemu_ld64
179
#define tcg_gen_qemu_stf32 tcg_gen_qemu_st32
180
#define tcg_gen_qemu_stf64 tcg_gen_qemu_st64
181
#define gen_helper_pack_32_f32 tcg_gen_mov_i32
182
#define gen_helper_pack_f32_32 tcg_gen_mov_i32
183

    
184
#define QMODE_I32 TCG_TYPE_I32
185
#define QMODE_I64 TCG_TYPE_I64
186
#define QMODE_F32 TCG_TYPE_F32
187
#define QMODE_F64 TCG_TYPE_F64
188
static inline TCGv gen_new_qreg(int mode)
189
{
190
    return tcg_temp_new(mode);
191
}
192

    
193
/* Generate a load from the specified address.  Narrow values are
194
   sign extended to full register width.  */
195
static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
196
{
197
    TCGv tmp;
198
    int index = IS_USER(s);
199
    s->is_mem = 1;
200
    switch(opsize) {
201
    case OS_BYTE:
202
        tmp = gen_new_qreg(QMODE_I32);
203
        if (sign)
204
            tcg_gen_qemu_ld8s(tmp, addr, index);
205
        else
206
            tcg_gen_qemu_ld8u(tmp, addr, index);
207
        break;
208
    case OS_WORD:
209
        tmp = gen_new_qreg(QMODE_I32);
210
        if (sign)
211
            tcg_gen_qemu_ld16s(tmp, addr, index);
212
        else
213
            tcg_gen_qemu_ld16u(tmp, addr, index);
214
        break;
215
    case OS_LONG:
216
        tmp = gen_new_qreg(QMODE_I32);
217
        tcg_gen_qemu_ld32u(tmp, addr, index);
218
        break;
219
    case OS_SINGLE:
220
        tmp = gen_new_qreg(QMODE_F32);
221
        tcg_gen_qemu_ldf32(tmp, addr, index);
222
        break;
223
    case OS_DOUBLE:
224
        tmp  = gen_new_qreg(QMODE_F64);
225
        tcg_gen_qemu_ldf64(tmp, addr, index);
226
        break;
227
    default:
228
        qemu_assert(0, "bad load size");
229
    }
230
    gen_throws_exception = gen_last_qop;
231
    return tmp;
232
}
233

    
234
/* Generate a store.  */
235
static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
236
{
237
    int index = IS_USER(s);
238
    s->is_mem = 1;
239
    switch(opsize) {
240
    case OS_BYTE:
241
        tcg_gen_qemu_st8(val, addr, index);
242
        break;
243
    case OS_WORD:
244
        tcg_gen_qemu_st16(val, addr, index);
245
        break;
246
    case OS_LONG:
247
        tcg_gen_qemu_st32(val, addr, index);
248
        break;
249
    case OS_SINGLE:
250
        tcg_gen_qemu_stf32(val, addr, index);
251
        break;
252
    case OS_DOUBLE:
253
        tcg_gen_qemu_stf64(val, addr, index);
254
        break;
255
    default:
256
        qemu_assert(0, "bad store size");
257
    }
258
    gen_throws_exception = gen_last_qop;
259
}
260

    
261
typedef enum {
262
    EA_STORE,
263
    EA_LOADU,
264
    EA_LOADS
265
} ea_what;
266

    
267
/* Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT is
   EA_LOADS, otherwise (EA_STORE) generate a store.  */
269
static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
270
                     ea_what what)
271
{
272
    if (what == EA_STORE) {
273
        gen_store(s, opsize, addr, val);
274
        return store_dummy;
275
    } else {
276
        return gen_load(s, opsize, addr, what == EA_LOADS);
277
    }
278
}
279

    
280
/* Read a 32-bit immediate constant.  */
281
static inline uint32_t read_im32(DisasContext *s)
282
{
283
    uint32_t im;
284
    im = ((uint32_t)lduw_code(s->pc)) << 16;
285
    s->pc += 2;
286
    im |= lduw_code(s->pc);
287
    s->pc += 2;
288
    return im;
289
}
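
/* Note: the two extension words are fetched most-significant word first
   (the first word is shifted left by 16), so the word sequence 0x1234,
   0x5678 in the instruction stream yields the immediate 0x12345678.  */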
290

    
291
/* Calculate an address index.  */
292
static TCGv gen_addr_index(uint16_t ext, TCGv tmp)
293
{
294
    TCGv add;
295
    int scale;
296

    
297
    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
298
    if ((ext & 0x800) == 0) {
299
        tcg_gen_ext16s_i32(tmp, add);
300
        add = tmp;
301
    }
302
    scale = (ext >> 9) & 3;
303
    if (scale != 0) {
304
        tcg_gen_shli_i32(tmp, add, scale);
305
        add = tmp;
306
    }
307
    return add;
308
}
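
/* Sketch of the extension word fields decoded above and in gen_lea_indexed
   below, inferred from the masks used (see the M68000 family programmer's
   manuals for the authoritative layout):

       bit  15     index register is an address (1) or data (0) register
       bits 14-12  index register number
       bit  11     long-word (1) or sign-extended word (0) index
       bits 10-9   scale factor (index << scale)
       bit  8      0 = brief format, 1 = full format extension word

   Full-format words additionally carry base/index suppress bits and
   base/outer displacement size fields, tested via the 0x80, 0x44, 0x30
   and 3 masks in gen_lea_indexed.  */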
309

    
310
/* Handle a base + index + displacement effective address.
311
   A NULL_QREG base means pc-relative.  */
312
static TCGv gen_lea_indexed(DisasContext *s, int opsize, TCGv base)
313
{
314
    uint32_t offset;
315
    uint16_t ext;
316
    TCGv add;
317
    TCGv tmp;
318
    uint32_t bd, od;
319

    
320
    offset = s->pc;
321
    ext = lduw_code(s->pc);
322
    s->pc += 2;
323

    
324
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
325
        return NULL_QREG;
326

    
327
    if (ext & 0x100) {
328
        /* full extension word format */
329
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
330
            return NULL_QREG;
331

    
332
        if ((ext & 0x30) > 0x10) {
333
            /* base displacement */
334
            if ((ext & 0x30) == 0x20) {
335
                bd = (int16_t)lduw_code(s->pc);
336
                s->pc += 2;
337
            } else {
338
                bd = read_im32(s);
339
            }
340
        } else {
341
            bd = 0;
342
        }
343
        tmp = gen_new_qreg(QMODE_I32);
344
        if ((ext & 0x44) == 0) {
345
            /* pre-index */
346
            add = gen_addr_index(ext, tmp);
347
        } else {
348
            add = NULL_QREG;
349
        }
350
        if ((ext & 0x80) == 0) {
351
            /* base not suppressed */
352
            if (IS_NULL_QREG(base)) {
353
                base = gen_im32(offset + bd);
354
                bd = 0;
355
            }
356
            if (!IS_NULL_QREG(add)) {
357
                tcg_gen_add_i32(tmp, add, base);
358
                add = tmp;
359
            } else {
360
                add = base;
361
            }
362
        }
363
        if (!IS_NULL_QREG(add)) {
364
            if (bd != 0) {
365
                tcg_gen_addi_i32(tmp, add, bd);
366
                add = tmp;
367
            }
368
        } else {
369
            add = gen_im32(bd);
370
        }
371
        if ((ext & 3) != 0) {
372
            /* memory indirect */
373
            base = gen_load(s, OS_LONG, add, 0);
374
            if ((ext & 0x44) == 4) {
375
                add = gen_addr_index(ext, tmp);
376
                tcg_gen_add_i32(tmp, add, base);
377
                add = tmp;
378
            } else {
379
                add = base;
380
            }
381
            if ((ext & 3) > 1) {
382
                /* outer displacement */
383
                if ((ext & 3) == 2) {
384
                    od = (int16_t)lduw_code(s->pc);
385
                    s->pc += 2;
386
                } else {
387
                    od = read_im32(s);
388
                }
389
            } else {
390
                od = 0;
391
            }
392
            if (od != 0) {
393
                tcg_gen_addi_i32(tmp, add, od);
394
                add = tmp;
395
            }
396
        }
397
    } else {
398
        /* brief extension word format */
399
        tmp = gen_new_qreg(QMODE_I32);
400
        add = gen_addr_index(ext, tmp);
401
        if (!IS_NULL_QREG(base)) {
402
            tcg_gen_add_i32(tmp, add, base);
403
            if ((int8_t)ext)
404
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
405
        } else {
406
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
407
        }
408
        add = tmp;
409
    }
410
    return add;
411
}
412

    
413
/* Update the CPU env CC_OP state.  */
414
static inline void gen_flush_cc_op(DisasContext *s)
415
{
416
    if (s->cc_op != CC_OP_DYNAMIC)
417
        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
418
}
419

    
420
/* Evaluate all the CC flags.  */
421
static inline void gen_flush_flags(DisasContext *s)
422
{
423
    if (s->cc_op == CC_OP_FLAGS)
424
        return;
425
    gen_flush_cc_op(s);
426
    gen_helper_flush_flags(cpu_env, QREG_CC_OP);
427
    s->cc_op = CC_OP_FLAGS;
428
}
429

    
430
static void gen_logic_cc(DisasContext *s, TCGv val)
431
{
432
    tcg_gen_mov_i32(QREG_CC_DEST, val);
433
    s->cc_op = CC_OP_LOGIC;
434
}
435

    
436
static void gen_update_cc_add(TCGv dest, TCGv src)
437
{
438
    tcg_gen_mov_i32(QREG_CC_DEST, dest);
439
    tcg_gen_mov_i32(QREG_CC_SRC, src);
440
}
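
/* Condition codes are evaluated lazily: handlers record an operation kind
   in s->cc_op together with its operands in QREG_CC_DEST/QREG_CC_SRC, and
   gen_flush_flags() invokes the flush_flags helper to materialize the real
   CCR bits only when they are actually needed.  For example, a logical
   operation only has to do

       tcg_gen_mov_i32(QREG_CC_DEST, result);
       s->cc_op = CC_OP_LOGIC;

   as gen_logic_cc() above does.  */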
441

    
442
static inline int opsize_bytes(int opsize)
443
{
444
    switch (opsize) {
445
    case OS_BYTE: return 1;
446
    case OS_WORD: return 2;
447
    case OS_LONG: return 4;
448
    case OS_SINGLE: return 4;
449
    case OS_DOUBLE: return 8;
450
    default:
451
        qemu_assert(0, "bad operand size");
452
    }
453
}
454

    
455
/* Assign value to a register.  If the width is less than the register width
456
   only the low part of the register is set.  */
457
static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
458
{
459
    TCGv tmp;
460
    switch (opsize) {
461
    case OS_BYTE:
462
        tcg_gen_andi_i32(reg, reg, 0xffffff00);
463
        tmp = gen_new_qreg(QMODE_I32);
464
        tcg_gen_ext8u_i32(tmp, val);
465
        tcg_gen_or_i32(reg, reg, tmp);
466
        break;
467
    case OS_WORD:
468
        tcg_gen_andi_i32(reg, reg, 0xffff0000);
469
        tmp = gen_new_qreg(QMODE_I32);
470
        tcg_gen_ext16u_i32(tmp, val);
471
        tcg_gen_or_i32(reg, reg, tmp);
472
        break;
473
    case OS_LONG:
474
        tcg_gen_mov_i32(reg, val);
475
        break;
476
    case OS_SINGLE:
477
        gen_helper_pack_32_f32(reg, val);
478
        break;
479
    default:
480
        qemu_assert(0, "Bad operand size");
481
        break;
482
    }
483
}
484

    
485
/* Sign or zero extend a value.  */
486
static inline TCGv gen_extend(TCGv val, int opsize, int sign)
487
{
488
    TCGv tmp;
489

    
490
    switch (opsize) {
491
    case OS_BYTE:
492
        tmp = gen_new_qreg(QMODE_I32);
493
        if (sign)
494
            tcg_gen_ext8s_i32(tmp, val);
495
        else
496
            tcg_gen_ext8u_i32(tmp, val);
497
        break;
498
    case OS_WORD:
499
        tmp = gen_new_qreg(QMODE_I32);
500
        if (sign)
501
            tcg_gen_ext16s_i32(tmp, val);
502
        else
503
            tcg_gen_ext16u_i32(tmp, val);
504
        break;
505
    case OS_LONG:
506
        tmp = val;
507
        break;
508
    case OS_SINGLE:
509
        tmp = gen_new_qreg(QMODE_F32);
510
        gen_helper_pack_f32_32(tmp, val);
511
        break;
512
    default:
513
        qemu_assert(0, "Bad operand size");
514
    }
515
    return tmp;
516
}
517

    
518
/* Generate code for an "effective address".  Does not adjust the base
519
   register for autoincrement addressing modes.  */
520
static TCGv gen_lea(DisasContext *s, uint16_t insn, int opsize)
521
{
522
    TCGv reg;
523
    TCGv tmp;
524
    uint16_t ext;
525
    uint32_t offset;
526

    
527
    switch ((insn >> 3) & 7) {
528
    case 0: /* Data register direct.  */
529
    case 1: /* Address register direct.  */
530
        return NULL_QREG;
531
    case 2: /* Indirect register */
532
    case 3: /* Indirect postincrement.  */
533
        return AREG(insn, 0);
534
    case 4: /* Indirect predecrement.  */
535
        reg = AREG(insn, 0);
536
        tmp = gen_new_qreg(QMODE_I32);
537
        tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
538
        return tmp;
539
    case 5: /* Indirect displacement.  */
540
        reg = AREG(insn, 0);
541
        tmp = gen_new_qreg(QMODE_I32);
542
        ext = lduw_code(s->pc);
543
        s->pc += 2;
544
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
545
        return tmp;
546
    case 6: /* Indirect index + displacement.  */
547
        reg = AREG(insn, 0);
548
        return gen_lea_indexed(s, opsize, reg);
549
    case 7: /* Other */
550
        switch (insn & 7) {
551
        case 0: /* Absolute short.  */
552
            offset = ldsw_code(s->pc);
553
            s->pc += 2;
554
            return gen_im32(offset);
555
        case 1: /* Absolute long.  */
556
            offset = read_im32(s);
557
            return gen_im32(offset);
558
        case 2: /* pc displacement  */
559
            tmp = gen_new_qreg(QMODE_I32);
560
            offset = s->pc;
561
            offset += ldsw_code(s->pc);
562
            s->pc += 2;
563
            return gen_im32(offset);
564
        case 3: /* pc index+displacement.  */
565
            return gen_lea_indexed(s, opsize, NULL_QREG);
566
        case 4: /* Immediate.  */
567
        default:
568
            return NULL_QREG;
569
        }
570
    }
571
    /* Should never happen.  */
572
    return NULL_QREG;
573
}
574

    
575
/* Helper function for gen_ea.  Reuse the computed address between the read
   and write halves of read/write operands.  */
577
static inline TCGv gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
578
                              TCGv val, TCGv *addrp, ea_what what)
579
{
580
    TCGv tmp;
581

    
582
    if (addrp && what == EA_STORE) {
583
        tmp = *addrp;
584
    } else {
585
        tmp = gen_lea(s, insn, opsize);
586
        if (IS_NULL_QREG(tmp))
587
            return tmp;
588
        if (addrp)
589
            *addrp = tmp;
590
    }
591
    return gen_ldst(s, opsize, tmp, val, what);
592
}
593

    
594
/* Generate code to load/store a value into/from an EA.  If WHAT is EA_STORE
   this is a write, otherwise it is a read (EA_LOADS sign extends, EA_LOADU
   zero extends).  ADDRP is non-null for read/write operands.  */
597
static TCGv gen_ea(DisasContext *s, uint16_t insn, int opsize, TCGv val,
598
                   TCGv *addrp, ea_what what)
599
{
600
    TCGv reg;
601
    TCGv result;
602
    uint32_t offset;
603

    
604
    switch ((insn >> 3) & 7) {
605
    case 0: /* Data register direct.  */
606
        reg = DREG(insn, 0);
607
        if (what == EA_STORE) {
608
            gen_partset_reg(opsize, reg, val);
609
            return store_dummy;
610
        } else {
611
            return gen_extend(reg, opsize, what == EA_LOADS);
612
        }
613
    case 1: /* Address register direct.  */
614
        reg = AREG(insn, 0);
615
        if (what == EA_STORE) {
616
            tcg_gen_mov_i32(reg, val);
617
            return store_dummy;
618
        } else {
619
            return gen_extend(reg, opsize, what == EA_LOADS);
620
        }
621
    case 2: /* Indirect register */
622
        reg = AREG(insn, 0);
623
        return gen_ldst(s, opsize, reg, val, what);
624
    case 3: /* Indirect postincrement.  */
625
        reg = AREG(insn, 0);
626
        result = gen_ldst(s, opsize, reg, val, what);
627
        /* ??? This is not exception safe.  The instruction may still
628
           fault after this point.  */
629
        if (what == EA_STORE || !addrp)
630
            tcg_gen_addi_i32(reg, reg, opsize_bytes(opsize));
631
        return result;
632
    case 4: /* Indirect predecrement.  */
633
        {
634
            TCGv tmp;
635
            if (addrp && what == EA_STORE) {
636
                tmp = *addrp;
637
            } else {
638
                tmp = gen_lea(s, insn, opsize);
639
                if (IS_NULL_QREG(tmp))
640
                    return tmp;
641
                if (addrp)
642
                    *addrp = tmp;
643
            }
644
            result = gen_ldst(s, opsize, tmp, val, what);
645
            /* ??? This is not exception safe.  The instruction may still
646
               fault after this point.  */
647
            if (what == EA_STORE || !addrp) {
648
                reg = AREG(insn, 0);
649
                tcg_gen_mov_i32(reg, tmp);
650
            }
651
        }
652
        return result;
653
    case 5: /* Indirect displacement.  */
654
    case 6: /* Indirect index + displacement.  */
655
        return gen_ea_once(s, insn, opsize, val, addrp, what);
656
    case 7: /* Other */
657
        switch (insn & 7) {
658
        case 0: /* Absolute short.  */
659
        case 1: /* Absolute long.  */
660
        case 2: /* pc displacement  */
661
        case 3: /* pc index+displacement.  */
662
            return gen_ea_once(s, insn, opsize, val, addrp, what);
663
        case 4: /* Immediate.  */
664
            /* Sign extend values for consistency.  */
665
            switch (opsize) {
666
            case OS_BYTE:
667
                if (what == EA_LOADS)
668
                    offset = ldsb_code(s->pc + 1);
669
                else
670
                    offset = ldub_code(s->pc + 1);
671
                s->pc += 2;
672
                break;
673
            case OS_WORD:
674
                if (what == EA_LOADS)
675
                    offset = ldsw_code(s->pc);
676
                else
677
                    offset = lduw_code(s->pc);
678
                s->pc += 2;
679
                break;
680
            case OS_LONG:
681
                offset = read_im32(s);
682
                break;
683
            default:
684
                qemu_assert(0, "Bad immediate operand");
685
            }
686
            return tcg_const_i32(offset);
687
        default:
688
            return NULL_QREG;
689
        }
690
    }
691
    /* Should never happen.  */
692
    return NULL_QREG;
693
}
694

    
695
/* This generates a conditional branch, clobbering all temporaries.  */
696
static void gen_jmpcc(DisasContext *s, int cond, int l1)
697
{
698
    TCGv tmp;
699

    
700
    /* TODO: Optimize compare/branch pairs rather than always flushing
701
       flag state to CC_OP_FLAGS.  */
702
    gen_flush_flags(s);
703
    switch (cond) {
704
    case 0: /* T */
705
        tcg_gen_br(l1);
706
        break;
707
    case 1: /* F */
708
        break;
709
    case 2: /* HI (!C && !Z) */
710
        tmp = gen_new_qreg(QMODE_I32);
711
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
712
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
713
        break;
714
    case 3: /* LS (C || Z) */
715
        tmp = gen_new_qreg(QMODE_I32);
716
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
717
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
718
        break;
719
    case 4: /* CC (!C) */
720
        tmp = gen_new_qreg(QMODE_I32);
721
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
722
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
723
        break;
724
    case 5: /* CS (C) */
725
        tmp = gen_new_qreg(QMODE_I32);
726
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
727
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
728
        break;
729
    case 6: /* NE (!Z) */
730
        tmp = gen_new_qreg(QMODE_I32);
731
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
732
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
733
        break;
734
    case 7: /* EQ (Z) */
735
        tmp = gen_new_qreg(QMODE_I32);
736
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
737
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
738
        break;
739
    case 8: /* VC (!V) */
740
        tmp = gen_new_qreg(QMODE_I32);
741
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
742
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
743
        break;
744
    case 9: /* VS (V) */
745
        tmp = gen_new_qreg(QMODE_I32);
746
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
747
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
748
        break;
749
    case 10: /* PL (!N) */
750
        tmp = gen_new_qreg(QMODE_I32);
751
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
752
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
753
        break;
754
    case 11: /* MI (N) */
755
        tmp = gen_new_qreg(QMODE_I32);
756
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
757
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
758
        break;
759
    case 12: /* GE (!(N ^ V)) */
760
        tmp = gen_new_qreg(QMODE_I32);
761
        assert(CCF_V == (CCF_N >> 2));
762
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
763
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
764
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
765
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
766
        break;
767
    case 13: /* LT (N ^ V) */
768
        tmp = gen_new_qreg(QMODE_I32);
769
        assert(CCF_V == (CCF_N >> 2));
770
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
771
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
772
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
773
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
774
        break;
775
    case 14: /* GT (!(Z || (N ^ V))) */
776
        tmp = gen_new_qreg(QMODE_I32);
777
        assert(CCF_V == (CCF_N >> 2));
778
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
779
        tcg_gen_shri_i32(tmp, tmp, 2);
780
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
781
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
782
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
783
        break;
784
    case 15: /* LE (Z || (N ^ V)) */
785
        tmp = gen_new_qreg(QMODE_I32);
786
        assert(CCF_V == (CCF_N >> 2));
787
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
788
        tcg_gen_shri_i32(tmp, tmp, 2);
789
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
790
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
791
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
792
        break;
793
    default:
794
        /* Should never happen.  */
795
        abort();
796
    }
797
}
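
/* Note: callers that want "branch if the condition is false" pass the m68k
   condition code with its low bit flipped, e.g. gen_jmpcc(s, cond ^ 1, l1)
   in disas_scc and disas_branch below; inverting bit 0 negates every
   condition pair in the table above (T/F, HI/LS, CC/CS, ...).  */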
798

    
799
DISAS_INSN(scc)
800
{
801
    int l1;
802
    int cond;
803
    TCGv reg;
804

    
805
    l1 = gen_new_label();
806
    cond = (insn >> 8) & 0xf;
807
    reg = DREG(insn, 0);
808
    tcg_gen_andi_i32(reg, reg, 0xffffff00);
809
    /* This is safe because we modify the reg directly, with no other values
810
       live.  */
811
    gen_jmpcc(s, cond ^ 1, l1);
812
    tcg_gen_ori_i32(reg, reg, 0xff);
813
    gen_set_label(l1);
814
}
815

    
816
/* Force a TB lookup after an instruction that changes the CPU state.  */
817
static void gen_lookup_tb(DisasContext *s)
818
{
819
    gen_flush_cc_op(s);
820
    tcg_gen_movi_i32(QREG_PC, s->pc);
821
    s->is_jmp = DISAS_UPDATE;
822
}
823

    
824
/* Generate a jump to an immediate address.  */
825
static void gen_jmp_im(DisasContext *s, uint32_t dest)
826
{
827
    gen_flush_cc_op(s);
828
    tcg_gen_movi_i32(QREG_PC, dest);
829
    s->is_jmp = DISAS_JUMP;
830
}
831

    
832
/* Generate a jump to the address in qreg DEST.  */
833
static void gen_jmp(DisasContext *s, TCGv dest)
834
{
835
    gen_flush_cc_op(s);
836
    tcg_gen_mov_i32(QREG_PC, dest);
837
    s->is_jmp = DISAS_JUMP;
838
}
839

    
840
static void gen_exception(DisasContext *s, uint32_t where, int nr)
841
{
842
    gen_flush_cc_op(s);
843
    gen_jmp_im(s, where);
844
    gen_helper_raise_exception(tcg_const_i32(nr));
845
}
846

    
847
static inline void gen_addr_fault(DisasContext *s)
848
{
849
    gen_exception(s, s->insn_pc, EXCP_ADDRESS);
850
}
851

    
852
#define SRC_EA(result, opsize, op_sign, addrp) do { \
853
    result = gen_ea(s, insn, opsize, NULL_QREG, addrp, op_sign ? EA_LOADS : EA_LOADU); \
854
    if (IS_NULL_QREG(result)) { \
855
        gen_addr_fault(s); \
856
        return; \
857
    } \
858
    } while (0)
859

    
860
#define DEST_EA(insn, opsize, val, addrp) do { \
861
    TCGv ea_result = gen_ea(s, insn, opsize, val, addrp, EA_STORE); \
862
    if (IS_NULL_QREG(ea_result)) { \
863
        gen_addr_fault(s); \
864
        return; \
865
    } \
866
    } while (0)
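
/* Usage sketch (it mirrors disas_arith_im and disas_addsub below): a
   read-modify-write handler loads the operand with SRC_EA, keeping the
   computed address so DEST_EA can reuse it for the write-back.  Here im
   stands for an immediate fetched earlier, e.g. with read_im32():

       TCGv src, dest, addr;
       SRC_EA(src, OS_LONG, 0, &addr);
       dest = gen_new_qreg(QMODE_I32);
       tcg_gen_ori_i32(dest, src, im);
       DEST_EA(insn, OS_LONG, dest, &addr);
*/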
867

    
868
/* Generate a jump to an immediate address, chaining to the next TB when
   possible.  */
869
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
870
{
871
    TranslationBlock *tb;
872

    
873
    tb = s->tb;
874
    if (__builtin_expect (s->singlestep_enabled, 0)) {
875
        gen_exception(s, dest, EXCP_DEBUG);
876
    } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
877
               (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
878
        tcg_gen_goto_tb(n);
879
        tcg_gen_movi_i32(QREG_PC, dest);
880
        tcg_gen_exit_tb((long)tb + n);
881
    } else {
882
        gen_jmp_im(s, dest);
883
        tcg_gen_exit_tb(0);
884
    }
885
    s->is_jmp = DISAS_TB_JUMP;
886
}
887

    
888
DISAS_INSN(undef_mac)
889
{
890
    gen_exception(s, s->pc - 2, EXCP_LINEA);
891
}
892

    
893
DISAS_INSN(undef_fpu)
894
{
895
    gen_exception(s, s->pc - 2, EXCP_LINEF);
896
}
897

    
898
DISAS_INSN(undef)
899
{
900
    gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
901
    cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
902
              insn, s->pc - 2);
903
}
904

    
905
DISAS_INSN(mulw)
906
{
907
    TCGv reg;
908
    TCGv tmp;
909
    TCGv src;
910
    int sign;
911

    
912
    sign = (insn & 0x100) != 0;
913
    reg = DREG(insn, 9);
914
    tmp = gen_new_qreg(QMODE_I32);
915
    if (sign)
916
        tcg_gen_ext16s_i32(tmp, reg);
917
    else
918
        tcg_gen_ext16u_i32(tmp, reg);
919
    SRC_EA(src, OS_WORD, sign, NULL);
920
    tcg_gen_mul_i32(tmp, tmp, src);
921
    tcg_gen_mov_i32(reg, tmp);
922
    /* Unlike m68k, coldfire always clears the overflow bit.  */
923
    gen_logic_cc(s, tmp);
924
}
925

    
926
DISAS_INSN(divw)
927
{
928
    TCGv reg;
929
    TCGv tmp;
930
    TCGv src;
931
    int sign;
932

    
933
    sign = (insn & 0x100) != 0;
934
    reg = DREG(insn, 9);
935
    if (sign) {
936
        tcg_gen_ext16s_i32(QREG_DIV1, reg);
937
    } else {
938
        tcg_gen_ext16u_i32(QREG_DIV1, reg);
939
    }
940
    SRC_EA(src, OS_WORD, sign, NULL);
941
    tcg_gen_mov_i32(QREG_DIV2, src);
942
    if (sign) {
943
        gen_helper_divs(cpu_env, tcg_const_i32(1));
944
    } else {
945
        gen_helper_divu(cpu_env, tcg_const_i32(1));
946
    }
947

    
948
    tmp = gen_new_qreg(QMODE_I32);
949
    src = gen_new_qreg(QMODE_I32);
950
    tcg_gen_ext16u_i32(tmp, QREG_DIV1);
951
    tcg_gen_shli_i32(src, QREG_DIV2, 16);
952
    tcg_gen_or_i32(reg, tmp, src);
953
    s->cc_op = CC_OP_FLAGS;
954
}
955

    
956
DISAS_INSN(divl)
957
{
958
    TCGv num;
959
    TCGv den;
960
    TCGv reg;
961
    uint16_t ext;
962

    
963
    ext = lduw_code(s->pc);
964
    s->pc += 2;
965
    if (ext & 0x87f8) {
966
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
967
        return;
968
    }
969
    num = DREG(ext, 12);
970
    reg = DREG(ext, 0);
971
    tcg_gen_mov_i32(QREG_DIV1, num);
972
    SRC_EA(den, OS_LONG, 0, NULL);
973
    tcg_gen_mov_i32(QREG_DIV2, den);
974
    if (ext & 0x0800) {
975
        gen_helper_divs(cpu_env, tcg_const_i32(0));
976
    } else {
977
        gen_helper_divu(cpu_env, tcg_const_i32(0));
978
    }
979
    if ((ext & 7) == ((ext >> 12) & 7)) {
980
        /* div */
981
        tcg_gen_mov_i32 (reg, QREG_DIV1);
982
    } else {
983
        /* rem */
984
        tcg_gen_mov_i32 (reg, QREG_DIV2);
985
    }
986
    s->cc_op = CC_OP_FLAGS;
987
}
988

    
989
DISAS_INSN(addsub)
990
{
991
    TCGv reg;
992
    TCGv dest;
993
    TCGv src;
994
    TCGv tmp;
995
    TCGv addr;
996
    int add;
997

    
998
    add = (insn & 0x4000) != 0;
999
    reg = DREG(insn, 9);
1000
    dest = gen_new_qreg(QMODE_I32);
1001
    if (insn & 0x100) {
1002
        SRC_EA(tmp, OS_LONG, 0, &addr);
1003
        src = reg;
1004
    } else {
1005
        tmp = reg;
1006
        SRC_EA(src, OS_LONG, 0, NULL);
1007
    }
1008
    if (add) {
1009
        tcg_gen_add_i32(dest, tmp, src);
1010
        gen_helper_xflag_lt(QREG_CC_X, dest, src);
1011
        s->cc_op = CC_OP_ADD;
1012
    } else {
1013
        gen_helper_xflag_lt(QREG_CC_X, tmp, src);
1014
        tcg_gen_sub_i32(dest, tmp, src);
1015
        s->cc_op = CC_OP_SUB;
1016
    }
1017
    gen_update_cc_add(dest, src);
1018
    if (insn & 0x100) {
1019
        DEST_EA(insn, OS_LONG, dest, &addr);
1020
    } else {
1021
        tcg_gen_mov_i32(reg, dest);
1022
    }
1023
}
1024

    
1025

    
1026
/* Reverse the order of the bits in REG.  */
1027
DISAS_INSN(bitrev)
1028
{
1029
    TCGv reg;
1030
    reg = DREG(insn, 0);
1031
    gen_helper_bitrev(reg, reg);
1032
}
1033

    
1034
DISAS_INSN(bitop_reg)
1035
{
1036
    int opsize;
1037
    int op;
1038
    TCGv src1;
1039
    TCGv src2;
1040
    TCGv tmp;
1041
    TCGv addr;
1042
    TCGv dest;
1043

    
1044
    if ((insn & 0x38) != 0)
1045
        opsize = OS_BYTE;
1046
    else
1047
        opsize = OS_LONG;
1048
    op = (insn >> 6) & 3;
1049
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1050
    src2 = DREG(insn, 9);
1051
    dest = gen_new_qreg(QMODE_I32);
1052

    
1053
    gen_flush_flags(s);
1054
    tmp = gen_new_qreg(QMODE_I32);
1055
    if (opsize == OS_BYTE)
1056
        tcg_gen_andi_i32(tmp, src2, 7);
1057
    else
1058
        tcg_gen_andi_i32(tmp, src2, 31);
1059
    src2 = tmp;
1060
    tmp = gen_new_qreg(QMODE_I32);
1061
    tcg_gen_shr_i32(tmp, src1, src2);
1062
    tcg_gen_andi_i32(tmp, tmp, 1);
1063
    tcg_gen_shli_i32(tmp, tmp, 2);
1064
    /* Clear CCF_Z if bit set.  */
1065
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1066
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1067

    
1068
    tcg_gen_shl_i32(tmp, tcg_const_i32(1), src2);
1069
    switch (op) {
1070
    case 1: /* bchg */
1071
        tcg_gen_xor_i32(dest, src1, tmp);
1072
        break;
1073
    case 2: /* bclr */
1074
        tcg_gen_not_i32(tmp, tmp);
1075
        tcg_gen_and_i32(dest, src1, tmp);
1076
        break;
1077
    case 3: /* bset */
1078
        tcg_gen_or_i32(dest, src1, tmp);
1079
        break;
1080
    default: /* btst */
1081
        break;
1082
    }
1083
    if (op)
1084
        DEST_EA(insn, opsize, dest, &addr);
1085
}
1086

    
1087
DISAS_INSN(sats)
1088
{
1089
    TCGv reg;
1090
    reg = DREG(insn, 0);
1091
    gen_flush_flags(s);
1092
    gen_helper_sats(reg, reg, QREG_CC_DEST);
1093
    gen_logic_cc(s, reg);
1094
}
1095

    
1096
static void gen_push(DisasContext *s, TCGv val)
1097
{
1098
    TCGv tmp;
1099

    
1100
    tmp = gen_new_qreg(QMODE_I32);
1101
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1102
    gen_store(s, OS_LONG, tmp, val);
1103
    tcg_gen_mov_i32(QREG_SP, tmp);
1104
}
1105

    
1106
DISAS_INSN(movem)
1107
{
1108
    TCGv addr;
1109
    int i;
1110
    uint16_t mask;
1111
    TCGv reg;
1112
    TCGv tmp;
1113
    int is_load;
1114

    
1115
    mask = lduw_code(s->pc);
1116
    s->pc += 2;
1117
    tmp = gen_lea(s, insn, OS_LONG);
1118
    if (IS_NULL_QREG(tmp)) {
1119
        gen_addr_fault(s);
1120
        return;
1121
    }
1122
    addr = gen_new_qreg(QMODE_I32);
1123
    tcg_gen_mov_i32(addr, tmp);
1124
    is_load = ((insn & 0x0400) != 0);
1125
    for (i = 0; i < 16; i++, mask >>= 1) {
1126
        if (mask & 1) {
1127
            if (i < 8)
1128
                reg = DREG(i, 0);
1129
            else
1130
                reg = AREG(i, 0);
1131
            if (is_load) {
1132
                tmp = gen_load(s, OS_LONG, addr, 0);
1133
                tcg_gen_mov_i32(reg, tmp);
1134
            } else {
1135
                gen_store(s, OS_LONG, addr, reg);
1136
            }
1137
            if (mask != 1)
1138
                tcg_gen_addi_i32(addr, addr, 4);
1139
        }
1140
    }
1141
}
1142

    
1143
DISAS_INSN(bitop_im)
1144
{
1145
    int opsize;
1146
    int op;
1147
    TCGv src1;
1148
    uint32_t mask;
1149
    int bitnum;
1150
    TCGv tmp;
1151
    TCGv addr;
1152

    
1153
    if ((insn & 0x38) != 0)
1154
        opsize = OS_BYTE;
1155
    else
1156
        opsize = OS_LONG;
1157
    op = (insn >> 6) & 3;
1158

    
1159
    bitnum = lduw_code(s->pc);
1160
    s->pc += 2;
1161
    if (bitnum & 0xff00) {
1162
        disas_undef(s, insn);
1163
        return;
1164
    }
1165

    
1166
    SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1167

    
1168
    gen_flush_flags(s);
1169
    if (opsize == OS_BYTE)
1170
        bitnum &= 7;
1171
    else
1172
        bitnum &= 31;
1173
    mask = 1 << bitnum;
1174

    
1175
    tmp = gen_new_qreg(QMODE_I32);
1176
    assert (CCF_Z == (1 << 2));
1177
    if (bitnum > 2)
1178
        tcg_gen_shri_i32(tmp, src1, bitnum - 2);
1179
    else if (bitnum < 2)
1180
        tcg_gen_shli_i32(tmp, src1, 2 - bitnum);
1181
    else
1182
        tcg_gen_mov_i32(tmp, src1);
1183
    tcg_gen_andi_i32(tmp, tmp, CCF_Z);
1184
    /* Clear CCF_Z if bit set.  */
1185
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
1186
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1187
    if (op) {
1188
        switch (op) {
1189
        case 1: /* bchg */
1190
            tcg_gen_xori_i32(tmp, src1, mask);
1191
            break;
1192
        case 2: /* bclr */
1193
            tcg_gen_andi_i32(tmp, src1, ~mask);
1194
            break;
1195
        case 3: /* bset */
1196
            tcg_gen_ori_i32(tmp, src1, mask);
1197
            break;
1198
        default: /* btst */
1199
            break;
1200
        }
1201
        DEST_EA(insn, opsize, tmp, &addr);
1202
    }
1203
}
1204

    
1205
DISAS_INSN(arith_im)
1206
{
1207
    int op;
1208
    uint32_t im;
1209
    TCGv src1;
1210
    TCGv dest;
1211
    TCGv addr;
1212

    
1213
    op = (insn >> 9) & 7;
1214
    SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
1215
    im = read_im32(s);
1216
    dest = gen_new_qreg(QMODE_I32);
1217
    switch (op) {
1218
    case 0: /* ori */
1219
        tcg_gen_ori_i32(dest, src1, im);
1220
        gen_logic_cc(s, dest);
1221
        break;
1222
    case 1: /* andi */
1223
        tcg_gen_andi_i32(dest, src1, im);
1224
        gen_logic_cc(s, dest);
1225
        break;
1226
    case 2: /* subi */
1227
        tcg_gen_mov_i32(dest, src1);
1228
        gen_helper_xflag_lt(QREG_CC_X, dest, gen_im32(im));
1229
        tcg_gen_subi_i32(dest, dest, im);
1230
        gen_update_cc_add(dest, gen_im32(im));
1231
        s->cc_op = CC_OP_SUB;
1232
        break;
1233
    case 3: /* addi */
1234
        tcg_gen_mov_i32(dest, src1);
1235
        tcg_gen_addi_i32(dest, dest, im);
1236
        gen_update_cc_add(dest, gen_im32(im));
1237
        gen_helper_xflag_lt(QREG_CC_X, dest, gen_im32(im));
1238
        s->cc_op = CC_OP_ADD;
1239
        break;
1240
    case 5: /* eori */
1241
        tcg_gen_xori_i32(dest, src1, im);
1242
        gen_logic_cc(s, dest);
1243
        break;
1244
    case 6: /* cmpi */
1245
        tcg_gen_mov_i32(dest, src1);
1246
        tcg_gen_subi_i32(dest, dest, im);
1247
        gen_update_cc_add(dest, gen_im32(im));
1248
        s->cc_op = CC_OP_SUB;
1249
        break;
1250
    default:
1251
        abort();
1252
    }
1253
    if (op != 6) {
1254
        DEST_EA(insn, OS_LONG, dest, &addr);
1255
    }
1256
}
1257

    
1258
DISAS_INSN(byterev)
1259
{
1260
    TCGv reg;
1261

    
1262
    reg = DREG(insn, 0);
1263
    tcg_gen_bswap_i32(reg, reg);
1264
}
1265

    
1266
DISAS_INSN(move)
1267
{
1268
    TCGv src;
1269
    TCGv dest;
1270
    int op;
1271
    int opsize;
1272

    
1273
    switch (insn >> 12) {
1274
    case 1: /* move.b */
1275
        opsize = OS_BYTE;
1276
        break;
1277
    case 2: /* move.l */
1278
        opsize = OS_LONG;
1279
        break;
1280
    case 3: /* move.w */
1281
        opsize = OS_WORD;
1282
        break;
1283
    default:
1284
        abort();
1285
    }
1286
    SRC_EA(src, opsize, 1, NULL);
1287
    op = (insn >> 6) & 7;
1288
    if (op == 1) {
1289
        /* movea */
1290
        /* The value will already have been sign extended.  */
1291
        dest = AREG(insn, 9);
1292
        tcg_gen_mov_i32(dest, src);
1293
    } else {
1294
        /* normal move */
1295
        uint16_t dest_ea;
1296
        dest_ea = ((insn >> 9) & 7) | (op << 3);
1297
        DEST_EA(dest_ea, opsize, src, NULL);
1298
        /* This will be correct because loads sign extend.  */
1299
        gen_logic_cc(s, src);
1300
    }
1301
}
1302

    
1303
DISAS_INSN(negx)
1304
{
1305
    TCGv reg;
1306

    
1307
    gen_flush_flags(s);
1308
    reg = DREG(insn, 0);
1309
    gen_helper_subx_cc(reg, cpu_env, tcg_const_i32(0), reg);
1310
}
1311

    
1312
DISAS_INSN(lea)
1313
{
1314
    TCGv reg;
1315
    TCGv tmp;
1316

    
1317
    reg = AREG(insn, 9);
1318
    tmp = gen_lea(s, insn, OS_LONG);
1319
    if (IS_NULL_QREG(tmp)) {
1320
        gen_addr_fault(s);
1321
        return;
1322
    }
1323
    tcg_gen_mov_i32(reg, tmp);
1324
}
1325

    
1326
DISAS_INSN(clr)
1327
{
1328
    int opsize;
1329

    
1330
    switch ((insn >> 6) & 3) {
1331
    case 0: /* clr.b */
1332
        opsize = OS_BYTE;
1333
        break;
1334
    case 1: /* clr.w */
1335
        opsize = OS_WORD;
1336
        break;
1337
    case 2: /* clr.l */
1338
        opsize = OS_LONG;
1339
        break;
1340
    default:
1341
        abort();
1342
    }
1343
    DEST_EA(insn, opsize, gen_im32(0), NULL);
1344
    gen_logic_cc(s, gen_im32(0));
1345
}
1346

    
1347
static TCGv gen_get_ccr(DisasContext *s)
1348
{
1349
    TCGv dest;
1350

    
1351
    gen_flush_flags(s);
1352
    dest = gen_new_qreg(QMODE_I32);
1353
    tcg_gen_shli_i32(dest, QREG_CC_X, 4);
1354
    tcg_gen_or_i32(dest, dest, QREG_CC_DEST);
1355
    return dest;
1356
}
1357

    
1358
DISAS_INSN(move_from_ccr)
1359
{
1360
    TCGv reg;
1361
    TCGv ccr;
1362

    
1363
    ccr = gen_get_ccr(s);
1364
    reg = DREG(insn, 0);
1365
    gen_partset_reg(OS_WORD, reg, ccr);
1366
}
1367

    
1368
DISAS_INSN(neg)
1369
{
1370
    TCGv reg;
1371
    TCGv src1;
1372

    
1373
    reg = DREG(insn, 0);
1374
    src1 = gen_new_qreg(QMODE_I32);
1375
    tcg_gen_mov_i32(src1, reg);
1376
    tcg_gen_neg_i32(reg, src1);
1377
    s->cc_op = CC_OP_SUB;
1378
    gen_update_cc_add(reg, src1);
1379
    gen_helper_xflag_lt(QREG_CC_X, tcg_const_i32(0), src1);
1380
    s->cc_op = CC_OP_SUB;
1381
}
1382

    
1383
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
1384
{
1385
    tcg_gen_movi_i32(QREG_CC_DEST, val & 0xf);
1386
    tcg_gen_movi_i32(QREG_CC_X, (val & 0x10) >> 4);
1387
    if (!ccr_only) {
1388
        gen_helper_set_sr(cpu_env, tcg_const_i32(val & 0xff00));
1389
    }
1390
}
1391

    
1392
static void gen_set_sr(DisasContext *s, uint16_t insn, int ccr_only)
1393
{
1394
    TCGv tmp;
1395
    TCGv reg;
1396

    
1397
    s->cc_op = CC_OP_FLAGS;
1398
    if ((insn & 0x38) == 0)
1399
      {
1400
        tmp = gen_new_qreg(QMODE_I32);
1401
        reg = DREG(insn, 0);
1402
        tcg_gen_andi_i32(QREG_CC_DEST, reg, 0xf);
1403
        tcg_gen_shri_i32(tmp, reg, 4);
1404
        tcg_gen_andi_i32(QREG_CC_X, tmp, 1);
1405
        if (!ccr_only) {
1406
            gen_helper_set_sr(cpu_env, reg);
1407
        }
1408
      }
1409
    else if ((insn & 0x3f) == 0x3c)
1410
      {
1411
        uint16_t val;
1412
        val = lduw_code(s->pc);
1413
        s->pc += 2;
1414
        gen_set_sr_im(s, val, ccr_only);
1415
      }
1416
    else
1417
        disas_undef(s, insn);
1418
}
1419

    
1420
DISAS_INSN(move_to_ccr)
1421
{
1422
    gen_set_sr(s, insn, 1);
1423
}
1424

    
1425
DISAS_INSN(not)
1426
{
1427
    TCGv reg;
1428

    
1429
    reg = DREG(insn, 0);
1430
    tcg_gen_not_i32(reg, reg);
1431
    gen_logic_cc(s, reg);
1432
}
1433

    
1434
DISAS_INSN(swap)
1435
{
1436
    TCGv src1;
1437
    TCGv src2;
1438
    TCGv reg;
1439

    
1440
    src1 = gen_new_qreg(QMODE_I32);
1441
    src2 = gen_new_qreg(QMODE_I32);
1442
    reg = DREG(insn, 0);
1443
    tcg_gen_shli_i32(src1, reg, 16);
1444
    tcg_gen_shri_i32(src2, reg, 16);
1445
    tcg_gen_or_i32(reg, src1, src2);
1446
    gen_logic_cc(s, reg);
1447
}
1448

    
1449
DISAS_INSN(pea)
1450
{
1451
    TCGv tmp;
1452

    
1453
    tmp = gen_lea(s, insn, OS_LONG);
1454
    if (IS_NULL_QREG(tmp)) {
1455
        gen_addr_fault(s);
1456
        return;
1457
    }
1458
    gen_push(s, tmp);
1459
}
1460

    
1461
DISAS_INSN(ext)
1462
{
1463
    int op;
1464
    TCGv reg;
1465
    TCGv tmp;
1466

    
1467
    reg = DREG(insn, 0);
1468
    op = (insn >> 6) & 7;
1469
    tmp = gen_new_qreg(QMODE_I32);
1470
    if (op == 3)
1471
        tcg_gen_ext16s_i32(tmp, reg);
1472
    else
1473
        tcg_gen_ext8s_i32(tmp, reg);
1474
    if (op == 2)
1475
        gen_partset_reg(OS_WORD, reg, tmp);
1476
    else
1477
        tcg_gen_mov_i32(reg, tmp);
1478
    gen_logic_cc(s, tmp);
1479
}
1480

    
1481
DISAS_INSN(tst)
1482
{
1483
    int opsize;
1484
    TCGv tmp;
1485

    
1486
    switch ((insn >> 6) & 3) {
1487
    case 0: /* tst.b */
1488
        opsize = OS_BYTE;
1489
        break;
1490
    case 1: /* tst.w */
1491
        opsize = OS_WORD;
1492
        break;
1493
    case 2: /* tst.l */
1494
        opsize = OS_LONG;
1495
        break;
1496
    default:
1497
        abort();
1498
    }
1499
    SRC_EA(tmp, opsize, 1, NULL);
1500
    gen_logic_cc(s, tmp);
1501
}
1502

    
1503
DISAS_INSN(pulse)
1504
{
1505
  /* Implemented as a NOP.  */
1506
}
1507

    
1508
DISAS_INSN(illegal)
1509
{
1510
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1511
}
1512

    
1513
/* ??? This should be atomic.  */
1514
DISAS_INSN(tas)
1515
{
1516
    TCGv dest;
1517
    TCGv src1;
1518
    TCGv addr;
1519

    
1520
    dest = gen_new_qreg(QMODE_I32);
1521
    SRC_EA(src1, OS_BYTE, 1, &addr);
1522
    gen_logic_cc(s, src1);
1523
    tcg_gen_ori_i32(dest, src1, 0x80);
1524
    DEST_EA(insn, OS_BYTE, dest, &addr);
1525
}
1526

    
1527
DISAS_INSN(mull)
1528
{
1529
    uint16_t ext;
1530
    TCGv reg;
1531
    TCGv src1;
1532
    TCGv dest;
1533

    
1534
    /* The upper 32 bits of the product are discarded, so
1535
       muls.l and mulu.l are functionally equivalent.  */
1536
    ext = lduw_code(s->pc);
1537
    s->pc += 2;
1538
    if (ext & 0x87ff) {
1539
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1540
        return;
1541
    }
1542
    reg = DREG(ext, 12);
1543
    SRC_EA(src1, OS_LONG, 0, NULL);
1544
    dest = gen_new_qreg(QMODE_I32);
1545
    tcg_gen_mul_i32(dest, src1, reg);
1546
    tcg_gen_mov_i32(reg, dest);
1547
    /* Unlike m68k, coldfire always clears the overflow bit.  */
1548
    gen_logic_cc(s, dest);
1549
}
1550

    
1551
DISAS_INSN(link)
1552
{
1553
    int16_t offset;
1554
    TCGv reg;
1555
    TCGv tmp;
1556

    
1557
    offset = ldsw_code(s->pc);
1558
    s->pc += 2;
1559
    reg = AREG(insn, 0);
1560
    tmp = gen_new_qreg(QMODE_I32);
1561
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
1562
    gen_store(s, OS_LONG, tmp, reg);
1563
    if ((insn & 7) != 7)
1564
        tcg_gen_mov_i32(reg, tmp);
1565
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
1566
}
1567

    
1568
DISAS_INSN(unlk)
1569
{
1570
    TCGv src;
1571
    TCGv reg;
1572
    TCGv tmp;
1573

    
1574
    src = gen_new_qreg(QMODE_I32);
1575
    reg = AREG(insn, 0);
1576
    tcg_gen_mov_i32(src, reg);
1577
    tmp = gen_load(s, OS_LONG, src, 0);
1578
    tcg_gen_mov_i32(reg, tmp);
1579
    tcg_gen_addi_i32(QREG_SP, src, 4);
1580
}
1581

    
1582
DISAS_INSN(nop)
1583
{
1584
}
1585

    
1586
DISAS_INSN(rts)
1587
{
1588
    TCGv tmp;
1589

    
1590
    tmp = gen_load(s, OS_LONG, QREG_SP, 0);
1591
    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
1592
    gen_jmp(s, tmp);
1593
}
1594

    
1595
DISAS_INSN(jump)
1596
{
1597
    TCGv tmp;
1598

    
1599
    /* Load the target address first to ensure correct exception
1600
       behavior.  */
1601
    tmp = gen_lea(s, insn, OS_LONG);
1602
    if (IS_NULL_QREG(tmp)) {
1603
        gen_addr_fault(s);
1604
        return;
1605
    }
1606
    if ((insn & 0x40) == 0) {
1607
        /* jsr */
1608
        gen_push(s, gen_im32(s->pc));
1609
    }
1610
    gen_jmp(s, tmp);
1611
}
1612

    
1613
DISAS_INSN(addsubq)
1614
{
1615
    TCGv src1;
1616
    TCGv src2;
1617
    TCGv dest;
1618
    int val;
1619
    TCGv addr;
1620

    
1621
    SRC_EA(src1, OS_LONG, 0, &addr);
1622
    val = (insn >> 9) & 7;
1623
    if (val == 0)
1624
        val = 8;
1625
    dest = gen_new_qreg(QMODE_I32);
1626
    tcg_gen_mov_i32(dest, src1);
1627
    if ((insn & 0x38) == 0x08) {
1628
        /* Don't update condition codes if the destination is an
1629
           address register.  */
1630
        if (insn & 0x0100) {
1631
            tcg_gen_subi_i32(dest, dest, val);
1632
        } else {
1633
            tcg_gen_addi_i32(dest, dest, val);
1634
        }
1635
    } else {
1636
        src2 = gen_im32(val);
1637
        if (insn & 0x0100) {
1638
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1639
            tcg_gen_subi_i32(dest, dest, val);
1640
            s->cc_op = CC_OP_SUB;
1641
        } else {
1642
            tcg_gen_addi_i32(dest, dest, val);
1643
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
1644
            s->cc_op = CC_OP_ADD;
1645
        }
1646
        gen_update_cc_add(dest, src2);
1647
    }
1648
    DEST_EA(insn, OS_LONG, dest, &addr);
1649
}
1650

    
1651
DISAS_INSN(tpf)
1652
{
1653
    switch (insn & 7) {
1654
    case 2: /* One extension word.  */
1655
        s->pc += 2;
1656
        break;
1657
    case 3: /* Two extension words.  */
1658
        s->pc += 4;
1659
        break;
1660
    case 4: /* No extension words.  */
1661
        break;
1662
    default:
1663
        disas_undef(s, insn);
1664
    }
1665
}
1666

    
1667
DISAS_INSN(branch)
1668
{
1669
    int32_t offset;
1670
    uint32_t base;
1671
    int op;
1672
    int l1;
1673

    
1674
    base = s->pc;
1675
    op = (insn >> 8) & 0xf;
1676
    offset = (int8_t)insn;
1677
    if (offset == 0) {
1678
        offset = ldsw_code(s->pc);
1679
        s->pc += 2;
1680
    } else if (offset == -1) {
1681
        offset = read_im32(s);
1682
    }
1683
    if (op == 1) {
1684
        /* bsr */
1685
        gen_push(s, gen_im32(s->pc));
1686
    }
1687
    gen_flush_cc_op(s);
1688
    if (op > 1) {
1689
        /* Bcc */
1690
        l1 = gen_new_label();
1691
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1692
        gen_jmp_tb(s, 1, base + offset);
1693
        gen_set_label(l1);
1694
        gen_jmp_tb(s, 0, s->pc);
1695
    } else {
1696
        /* Unconditional branch.  */
1697
        gen_jmp_tb(s, 0, base + offset);
1698
    }
1699
}
1700

    
1701
DISAS_INSN(moveq)
1702
{
1703
    uint32_t val;
1704

    
1705
    val = (int8_t)insn;
1706
    tcg_gen_movi_i32(DREG(insn, 9), val);
1707
    gen_logic_cc(s, tcg_const_i32(val));
1708
}
1709

    
1710
DISAS_INSN(mvzs)
1711
{
1712
    int opsize;
1713
    TCGv src;
1714
    TCGv reg;
1715

    
1716
    if (insn & 0x40)
1717
        opsize = OS_WORD;
1718
    else
1719
        opsize = OS_BYTE;
1720
    SRC_EA(src, opsize, (insn & 0x80) != 0, NULL);
1721
    reg = DREG(insn, 9);
1722
    tcg_gen_mov_i32(reg, src);
1723
    gen_logic_cc(s, src);
1724
}
1725

    
1726
DISAS_INSN(or)
1727
{
1728
    TCGv reg;
1729
    TCGv dest;
1730
    TCGv src;
1731
    TCGv addr;
1732

    
1733
    reg = DREG(insn, 9);
1734
    dest = gen_new_qreg(QMODE_I32);
1735
    if (insn & 0x100) {
1736
        SRC_EA(src, OS_LONG, 0, &addr);
1737
        tcg_gen_or_i32(dest, src, reg);
1738
        DEST_EA(insn, OS_LONG, dest, &addr);
1739
    } else {
1740
        SRC_EA(src, OS_LONG, 0, NULL);
1741
        tcg_gen_or_i32(dest, src, reg);
1742
        tcg_gen_mov_i32(reg, dest);
1743
    }
1744
    gen_logic_cc(s, dest);
1745
}
1746

    
1747
DISAS_INSN(suba)
1748
{
1749
    TCGv src;
1750
    TCGv reg;
1751

    
1752
    SRC_EA(src, OS_LONG, 0, NULL);
1753
    reg = AREG(insn, 9);
1754
    tcg_gen_sub_i32(reg, reg, src);
1755
}
1756

    
1757
DISAS_INSN(subx)
1758
{
1759
    TCGv reg;
1760
    TCGv src;
1761

    
1762
    gen_flush_flags(s);
1763
    reg = DREG(insn, 9);
1764
    src = DREG(insn, 0);
1765
    gen_helper_subx_cc(reg, cpu_env, reg, src);
1766
}
1767

    
1768
DISAS_INSN(mov3q)
1769
{
1770
    TCGv src;
1771
    int val;
1772

    
1773
    val = (insn >> 9) & 7;
1774
    if (val == 0)
1775
        val = -1;
1776
    src = gen_im32(val);
1777
    gen_logic_cc(s, src);
1778
    DEST_EA(insn, OS_LONG, src, NULL);
1779
}
1780

    
1781
DISAS_INSN(cmp)
1782
{
1783
    int op;
1784
    TCGv src;
1785
    TCGv reg;
1786
    TCGv dest;
1787
    int opsize;
1788

    
1789
    op = (insn >> 6) & 3;
1790
    switch (op) {
1791
    case 0: /* cmp.b */
1792
        opsize = OS_BYTE;
1793
        s->cc_op = CC_OP_CMPB;
1794
        break;
1795
    case 1: /* cmp.w */
1796
        opsize = OS_WORD;
1797
        s->cc_op = CC_OP_CMPW;
1798
        break;
1799
    case 2: /* cmp.l */
1800
        opsize = OS_LONG;
1801
        s->cc_op = CC_OP_SUB;
1802
        break;
1803
    default:
1804
        abort();
1805
    }
1806
    SRC_EA(src, opsize, 1, NULL);
1807
    reg = DREG(insn, 9);
1808
    dest = gen_new_qreg(QMODE_I32);
1809
    tcg_gen_sub_i32(dest, reg, src);
1810
    gen_update_cc_add(dest, src);
1811
}
1812

    
1813
DISAS_INSN(cmpa)
1814
{
1815
    int opsize;
1816
    TCGv src;
1817
    TCGv reg;
1818
    TCGv dest;
1819

    
1820
    if (insn & 0x100) {
1821
        opsize = OS_LONG;
1822
    } else {
1823
        opsize = OS_WORD;
1824
    }
1825
    SRC_EA(src, opsize, 1, NULL);
1826
    reg = AREG(insn, 9);
1827
    dest = gen_new_qreg(QMODE_I32);
1828
    tcg_gen_sub_i32(dest, reg, src);
1829
    gen_update_cc_add(dest, src);
1830
    s->cc_op = CC_OP_SUB;
1831
}
1832

    
1833
DISAS_INSN(eor)
1834
{
1835
    TCGv src;
1836
    TCGv reg;
1837
    TCGv dest;
1838
    TCGv addr;
1839

    
1840
    SRC_EA(src, OS_LONG, 0, &addr);
1841
    reg = DREG(insn, 9);
1842
    dest = gen_new_qreg(QMODE_I32);
1843
    tcg_gen_xor_i32(dest, src, reg);
1844
    gen_logic_cc(s, dest);
1845
    DEST_EA(insn, OS_LONG, dest, &addr);
1846
}
1847

    
1848
DISAS_INSN(and)
1849
{
1850
    TCGv src;
1851
    TCGv reg;
1852
    TCGv dest;
1853
    TCGv addr;
1854

    
1855
    reg = DREG(insn, 9);
1856
    dest = gen_new_qreg(QMODE_I32);
1857
    if (insn & 0x100) {
1858
        SRC_EA(src, OS_LONG, 0, &addr);
1859
        tcg_gen_and_i32(dest, src, reg);
1860
        DEST_EA(insn, OS_LONG, dest, &addr);
1861
    } else {
1862
        SRC_EA(src, OS_LONG, 0, NULL);
1863
        tcg_gen_and_i32(dest, src, reg);
1864
        tcg_gen_mov_i32(reg, dest);
1865
    }
1866
    gen_logic_cc(s, dest);
1867
}
1868

    
1869
DISAS_INSN(adda)
1870
{
1871
    TCGv src;
1872
    TCGv reg;
1873

    
1874
    SRC_EA(src, OS_LONG, 0, NULL);
1875
    reg = AREG(insn, 9);
1876
    tcg_gen_add_i32(reg, reg, src);
1877
}
1878

    
1879
DISAS_INSN(addx)
1880
{
1881
    TCGv reg;
1882
    TCGv src;
1883

    
1884
    gen_flush_flags(s);
1885
    reg = DREG(insn, 9);
1886
    src = DREG(insn, 0);
1887
    gen_helper_addx_cc(reg, cpu_env, reg, src);
1888
    s->cc_op = CC_OP_FLAGS;
1889
}
1890

    
1891
/* TODO: This could be implemented without helper functions.  */
1892
DISAS_INSN(shift_im)
1893
{
1894
    TCGv reg;
1895
    int tmp;
1896
    TCGv shift;
1897

    
1898
    reg = DREG(insn, 0);
1899
    tmp = (insn >> 9) & 7;
1900
    if (tmp == 0)
1901
        tmp = 8;
1902
    shift = gen_im32(tmp);
1903
    /* No need to flush flags because we know we will set the C flag.  */
    if (insn & 0x100) {
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
    } else {
        if (insn & 8) {
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
        } else {
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
        }
    }
    s->cc_op = CC_OP_SHIFT;
}

DISAS_INSN(shift_reg)
{
    TCGv reg;
    TCGv shift;

    reg = DREG(insn, 0);
    shift = DREG(insn, 9);
    /* Shift by zero leaves C flag unmodified.  */
    gen_flush_flags(s);
    if (insn & 0x100) {
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
    } else {
        if (insn & 8) {
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
        } else {
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
        }
    }
    s->cc_op = CC_OP_SHIFT;
}

DISAS_INSN(ff1)
{
    TCGv reg;
    reg = DREG(insn, 0);
    gen_logic_cc(s, reg);
    gen_helper_ff1(reg, reg);
}

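/* Build the architectural SR value: the upper bits come from QREG_SR
   (masked with 0xffe0) and the low condition-code bits from the CCR
   returned by gen_get_ccr().  */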
static TCGv gen_get_sr(DisasContext *s)
{
    TCGv ccr;
    TCGv sr;

    ccr = gen_get_ccr(s);
    sr = gen_new_qreg(QMODE_I32);
    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
    tcg_gen_or_i32(sr, sr, ccr);
    return sr;
}

DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    addr = s->pc - 2;
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (ext != 0x46FC) {
        gen_exception(s, addr, EXCP_UNSUPPORTED);
        return;
    }
    ext = lduw_code(s->pc);
    s->pc += 2;
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
}

DISAS_INSN(move_from_sr)
{
    TCGv reg;
    TCGv sr;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    sr = gen_get_sr(s);
    reg = DREG(insn, 0);
    gen_partset_reg(OS_WORD, reg, sr);
}

DISAS_INSN(move_to_sr)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_set_sr(s, insn, 0);
    gen_lookup_tb(s);
}

DISAS_INSN(move_from_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(move_to_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement USP.  */
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}

DISAS_INSN(halt)
{
    gen_exception(s, s->pc, EXCP_HALT_INSN);
}

DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    gen_set_sr_im(s, ext, 0);
    tcg_gen_movi_i32(QREG_HALTED, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}

DISAS_INSN(rte)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_exception(s, s->pc - 2, EXCP_RTE);
}

DISAS_INSN(movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = lduw_code(s->pc);
    s->pc += 2;

    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    gen_lookup_tb(s);
}

DISAS_INSN(intouch)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* ICache fetch.  Implement as no-op.  */
}

DISAS_INSN(cpushl)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}

DISAS_INSN(wddata)
{
    gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
}

DISAS_INSN(wdebug)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    qemu_assert(0, "WDEBUG not implemented");
}

DISAS_INSN(trap)
{
    gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
}

/* ??? FP exceptions are not implemented.  Most exceptions are deferred until
   immediately before the next FP instruction is executed.  */
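/* Bits 15-13 of the extension word select the operation class below:
   0/2 register and EA arithmetic, 3 fmove to memory, 4/5 moves to/from
   the FP control registers, 6/7 fmovem.  */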
DISAS_INSN(fpu)
{
    uint16_t ext;
    int opmode;
    TCGv src;
    TCGv dest;
    TCGv res;
    int round;
    int opsize;

    ext = lduw_code(s->pc);
    s->pc += 2;
    opmode = ext & 0x7f;
    switch ((ext >> 13) & 7) {
    case 0: case 2:
        break;
    case 1:
        goto undef;
    case 3: /* fmove out */
        src = FREG(ext, 7);
        /* fmove */
        /* ??? TODO: Proper behavior on overflow.  */
        switch ((ext >> 10) & 7) {
        case 0:
            opsize = OS_LONG;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        case 1:
            opsize = OS_SINGLE;
            res = gen_new_qreg(QMODE_F32);
            gen_helper_f64_to_f32(res, cpu_env, src);
            break;
        case 4:
            opsize = OS_WORD;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        case 5:
            opsize = OS_DOUBLE;
            res = src;
            break;
        case 6:
            opsize = OS_BYTE;
            res = gen_new_qreg(QMODE_I32);
            gen_helper_f64_to_i32(res, cpu_env, src);
            break;
        default:
            goto undef;
        }
        DEST_EA(insn, opsize, res, NULL);
        return;
    case 4: /* fmove to control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Ignore writes.  */
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove to control %d",
                      (ext >> 10) & 7);
        }
        break;
    case 5: /* fmove from control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Always return zero.  */
            res = gen_im32(0);
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            cpu_abort(NULL, "Unimplemented: fmove from control %d",
                      (ext >> 10) & 7);
            goto undef;
        }
        DEST_EA(insn, OS_LONG, res, NULL);
        break;
    case 6: /* fmovem */
    case 7:
        {
            TCGv addr;
            uint16_t mask;
            int i;
            if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
                goto undef;
            src = gen_lea(s, insn, OS_LONG);
            if (IS_NULL_QREG(src)) {
                gen_addr_fault(s);
                return;
            }
            addr = gen_new_qreg(QMODE_I32);
            tcg_gen_mov_i32(addr, src);
            mask = 0x80;
            for (i = 0; i < 8; i++) {
                if (ext & mask) {
                    s->is_mem = 1;
                    dest = FREG(i, 0);
                    if (ext & (1 << 13)) {
                        /* store */
                        tcg_gen_qemu_stf64(dest, addr, IS_USER(s));
                    } else {
                        /* load */
                        tcg_gen_qemu_ldf64(dest, addr, IS_USER(s));
                    }
                    if (ext & (mask - 1))
                        tcg_gen_addi_i32(addr, addr, 8);
                }
                mask >>= 1;
            }
        }
        return;
    }
    if (ext & (1 << 14)) {
        TCGv tmp;

        /* Source effective address.  */
        switch ((ext >> 10) & 7) {
        case 0: opsize = OS_LONG; break;
        case 1: opsize = OS_SINGLE; break;
        case 4: opsize = OS_WORD; break;
        case 5: opsize = OS_DOUBLE; break;
        case 6: opsize = OS_BYTE; break;
        default:
            goto undef;
        }
        SRC_EA(tmp, opsize, 1, NULL);
        if (opsize == OS_DOUBLE) {
            src = tmp;
        } else {
            src = gen_new_qreg(QMODE_F64);
            switch (opsize) {
            case OS_LONG:
            case OS_WORD:
            case OS_BYTE:
                gen_helper_i32_to_f64(src, cpu_env, tmp);
                break;
            case OS_SINGLE:
                gen_helper_f32_to_f64(src, cpu_env, tmp);
                break;
            }
        }
    } else {
        /* Source register.  */
        src = FREG(ext, 10);
    }
    dest = FREG(ext, 7);
    res = gen_new_qreg(QMODE_F64);
    if (opmode != 0x3a)
        tcg_gen_mov_f64(res, dest);
    round = 1;
    switch (opmode) {
    case 0: case 0x40: case 0x44: /* fmove */
        tcg_gen_mov_f64(res, src);
        break;
    case 1: /* fint */
        gen_helper_iround_f64(res, cpu_env, src);
        round = 0;
        break;
    case 3: /* fintrz */
        gen_helper_itrunc_f64(res, cpu_env, src);
        round = 0;
        break;
    case 4: case 0x41: case 0x45: /* fsqrt */
        gen_helper_sqrt_f64(res, cpu_env, src);
        break;
    case 0x18: case 0x58: case 0x5c: /* fabs */
        gen_helper_abs_f64(res, src);
        break;
    case 0x1a: case 0x5a: case 0x5e: /* fneg */
        gen_helper_chs_f64(res, src);
        break;
    case 0x20: case 0x60: case 0x64: /* fdiv */
        gen_helper_div_f64(res, cpu_env, res, src);
        break;
    case 0x22: case 0x62: case 0x66: /* fadd */
        gen_helper_add_f64(res, cpu_env, res, src);
        break;
    case 0x23: case 0x63: case 0x67: /* fmul */
        gen_helper_mul_f64(res, cpu_env, res, src);
        break;
    case 0x28: case 0x68: case 0x6c: /* fsub */
        gen_helper_sub_f64(res, cpu_env, res, src);
        break;
    case 0x38: /* fcmp */
        gen_helper_sub_cmp_f64(res, cpu_env, res, src);
        dest = NULL_QREG;
        round = 0;
        break;
    case 0x3a: /* ftst */
        tcg_gen_mov_f64(res, src);
        dest = NULL_QREG;
        round = 0;
        break;
    default:
        goto undef;
    }
    if (round) {
        if (opmode & 0x40) {
            if ((opmode & 0x4) != 0)
                round = 0;
        } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
            round = 0;
        }
    }
    if (round) {
        TCGv tmp;

        tmp = gen_new_qreg(QMODE_F32);
        gen_helper_f64_to_f32(tmp, cpu_env, res);
        gen_helper_f32_to_f64(res, cpu_env, tmp);
    }
    tcg_gen_mov_f64(QREG_FP_RESULT, res);
    if (!IS_NULL_QREG(dest)) {
        tcg_gen_mov_f64(dest, res);
    }
    return;
undef:
    s->pc -= 2;
    disas_undef_fpu(s, insn);
}

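/* Going by the condition cases below, gen_helper_compare_f64 classifies
   QREG_FP_RESULT as -1 (less than zero), 0 (zero), 1 (greater than zero)
   or 2 (unordered).  */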
DISAS_INSN(fbcc)
{
    uint32_t offset;
    uint32_t addr;
    TCGv flag;
    int l1;

    addr = s->pc;
    offset = ldsw_code(s->pc);
    s->pc += 2;
    if (insn & (1 << 6)) {
        offset = (offset << 16) | lduw_code(s->pc);
        s->pc += 2;
    }

    l1 = gen_new_label();
    /* TODO: Raise BSUN exception.  */
    flag = gen_new_qreg(QMODE_I32);
    gen_helper_compare_f64(flag, cpu_env, QREG_FP_RESULT);
    /* Jump to l1 if condition is true.  */
    switch (insn & 0xf) {
    case 0: /* f */
        break;
    case 1: /* eq (=0) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 2: /* ogt (=1) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(1), l1);
        break;
    case 3: /* oge (=0 or =1) */
        tcg_gen_brcond_i32(TCG_COND_LEU, flag, tcg_const_i32(1), l1);
        break;
    case 4: /* olt (=-1) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(0), l1);
        break;
    case 5: /* ole (=-1 or =0) */
        tcg_gen_brcond_i32(TCG_COND_LE, flag, tcg_const_i32(0), l1);
        break;
    case 6: /* ogl (=-1 or =1) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 7: /* or (=2) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(2), l1);
        break;
    case 8: /* un (<2) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(2), l1);
        break;
    case 9: /* ueq (=0 or =2) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 10: /* ugt (>0) */
        tcg_gen_brcond_i32(TCG_COND_GT, flag, tcg_const_i32(0), l1);
        break;
    case 11: /* uge (>=0) */
        tcg_gen_brcond_i32(TCG_COND_GE, flag, tcg_const_i32(0), l1);
        break;
    case 12: /* ult (=-1 or =2) */
        tcg_gen_brcond_i32(TCG_COND_GEU, flag, tcg_const_i32(2), l1);
        break;
    case 13: /* ule (!=1) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(1), l1);
        break;
    case 14: /* ne (!=0) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 15: /* t */
        tcg_gen_br(l1);
        break;
    }
    gen_jmp_tb(s, 0, s->pc);
    gen_set_label(l1);
    gen_jmp_tb(s, 1, addr + offset);
}

DISAS_INSN(frestore)
{
    /* TODO: Implement frestore.  */
    qemu_assert(0, "FRESTORE not implemented");
}

DISAS_INSN(fsave)
{
    /* TODO: Implement fsave.  */
    qemu_assert(0, "FSAVE not implemented");
}

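/* Extract one 16-bit half of a MAC operand: in fractional mode (MACSR_FI)
   the half is left-aligned in bits 31:16, in signed mode (MACSR_SU) it is
   sign-extended, otherwise it is zero-extended.  */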
static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
{
    TCGv tmp = gen_new_qreg(QMODE_I32);
    if (s->env->macsr & MACSR_FI) {
        if (upper)
            tcg_gen_andi_i32(tmp, val, 0xffff0000);
        else
            tcg_gen_shli_i32(tmp, val, 16);
    } else if (s->env->macsr & MACSR_SU) {
        if (upper)
            tcg_gen_sari_i32(tmp, val, 16);
        else
            tcg_gen_ext16s_i32(tmp, val);
    } else {
        if (upper)
            tcg_gen_shri_i32(tmp, val, 16);
        else
            tcg_gen_ext16u_i32(tmp, val);
    }
    return tmp;
}

static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}

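/* MAC/MSAC: covers the register forms, the MAC-with-load forms
   (insn & 0x30) and the dual-accumulate variant, which additionally
   requires the CF_EMAC_B feature.  */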
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    if (IS_NULL_QREG(s->mactmp))
        s->mactmp = tcg_temp_new(TCG_TYPE_I64);

    ext = lduw_code(s->pc);
    s->pc += 2;

    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(s, insn, OS_LONG);
        addr = gen_new_qreg(QMODE_I32);
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /* Load the value now to ensure correct exception behavior.
           Perform writeback after reading the MAC inputs.  */
        loadval = gen_load(s, OS_LONG, addr, 0);

        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = gen_new_qreg(QMODE_I32);
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }
    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = gen_new_qreg(QMODE_I32);
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = gen_new_qreg(QMODE_I32);
        gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = gen_new_qreg(QMODE_I32);
            gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /* FIXME: Should address writeback happen with the masked or
           unmasked value?  */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}

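/* Move an accumulator to Rx.  With MACSR_OMC clear the raw low 32 bits
   are copied; otherwise a saturating helper is used.  Bit 6 of the insn
   also clears the accumulator and its PAV flag.  */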
DISAS_INSN(from_mac)
{
    TCGv rx;
    TCGv acc;
    int accnum;

    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    if (s->env->macsr & MACSR_FI) {
        gen_helper_get_macf(cpu_env, rx, acc);
    } else if ((s->env->macsr & MACSR_OMC) == 0) {
        tcg_gen_trunc_i64_i32(rx, acc);
    } else if (s->env->macsr & MACSR_SU) {
        gen_helper_get_macs(rx, acc);
    } else {
        gen_helper_get_macu(rx, acc);
    }
    if (insn & 0x40) {
        tcg_gen_movi_i64(acc, 0);
        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    }
}

DISAS_INSN(move_mac)
{
    /* FIXME: This can be done without a helper.  */
    int src;
    TCGv dest;
    src = insn & 3;
    dest = tcg_const_i32((insn >> 9) & 3);
    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, dest);
}

DISAS_INSN(from_macsr)
{
    TCGv reg;

    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MACSR);
}

DISAS_INSN(from_mask)
{
    TCGv reg;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
}

DISAS_INSN(from_mext)
{
    TCGv reg;
    TCGv acc;
    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_get_mac_extf(reg, cpu_env, acc);
    else
        gen_helper_get_mac_exti(reg, cpu_env, acc);
}

DISAS_INSN(macsr_to_ccr)
{
    tcg_gen_movi_i32(QREG_CC_X, 0);
    tcg_gen_andi_i32(QREG_CC_DEST, QREG_MACSR, 0xf);
    s->cc_op = CC_OP_FLAGS;
}

DISAS_INSN(to_mac)
{
    TCGv acc;
    TCGv val;
    int accnum;
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    SRC_EA(val, OS_LONG, 0, NULL);
    if (s->env->macsr & MACSR_FI) {
        tcg_gen_ext_i32_i64(acc, val);
        tcg_gen_shli_i64(acc, acc, 8);
    } else if (s->env->macsr & MACSR_SU) {
        tcg_gen_ext_i32_i64(acc, val);
    } else {
        tcg_gen_extu_i32_i64(acc, val);
    }
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
}

DISAS_INSN(to_macsr)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    gen_helper_set_macsr(cpu_env, val);
    gen_lookup_tb(s);
}

DISAS_INSN(to_mask)
{
    TCGv val;
    SRC_EA(val, OS_LONG, 0, NULL);
    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
}

DISAS_INSN(to_mext)
{
    TCGv val;
    TCGv acc;
    SRC_EA(val, OS_LONG, 0, NULL);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_set_mac_extf(cpu_env, val, acc);
    else if (s->env->macsr & MACSR_SU)
        gen_helper_set_mac_exts(cpu_env, val, acc);
    else
        gen_helper_set_mac_extu(cpu_env, val, acc);
}

static disas_proc opcode_table[65536];

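/* Fill every opcode_table slot that matches opcode under mask.  For
   example, registering opcode 0x51f8 with mask 0xfff8 stops at the first
   clear mask bit (bit 2), then walks 0x51f8-0x51ff and installs the
   handler in each slot that still matches the opcode.  */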
static void
register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
{
  int i;
  int from;
  int to;

  /* Sanity check.  All set bits must be included in the mask.  */
  if (opcode & ~mask) {
      fprintf(stderr,
              "qemu internal error: bogus opcode definition %04x/%04x\n",
              opcode, mask);
      abort();
  }
  /* This could probably be cleverer.  For now just optimize the case where
     the top bits are known.  */
  /* Find the first zero bit in the mask.  */
  i = 0x8000;
  while ((i & mask) != 0)
      i >>= 1;
  /* Iterate over all combinations of this and lower bits.  */
  if (i == 0)
      i = 1;
  else
      i <<= 1;
  from = opcode & ~(i - 1);
  to = from + i;
  for (i = from; i < to; i++) {
      if ((i & mask) == opcode)
          opcode_table[i] = proc;
  }
}

/* Register m68k opcode handlers.  Order is important.
   Later insns override earlier ones.  */
void register_m68k_insns (CPUM68KState *env)
{
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        register_opcode(disas_##name, 0x##opcode, 0x##mask); \
    } while(0)
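    /* Each INSN() expands to a feature-gated registration, e.g.
       INSN(halt, 4ac8, ffff, CF_ISA_A) installs disas_halt for the single
       opcode 0x4ac8 only if the CPU has the CF_ISA_A feature.  */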
    INSN(undef,     0000, 0000, CF_ISA_A);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
    INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(bitop_im,  0800, ffc0, CF_ISA_A);
    INSN(bitop_im,  0840, ffc0, CF_ISA_A);
    INSN(bitop_im,  0880, ffc0, CF_ISA_A);
    INSN(bitop_im,  08c0, ffc0, CF_ISA_A);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(move,      1000, f000, CF_ISA_A);
    INSN(move,      2000, f000, CF_ISA_A);
    INSN(move,      3000, f000, CF_ISA_A);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(lea,       41c0, f1c0, CF_ISA_A);
    INSN(clr,       4200, ff00, CF_ISA_A);
    INSN(undef,     42c0, ffc0, CF_ISA_A);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
    INSN(pea,       4840, ffc0, CF_ISA_A);
    INSN(swap,      4840, fff8, CF_ISA_A);
    INSN(movem,     48c0, fbc0, CF_ISA_A);
    INSN(ext,       4880, fff8, CF_ISA_A);
    INSN(ext,       48c0, fff8, CF_ISA_A);
    INSN(ext,       49c0, fff8, CF_ISA_A);
    INSN(tst,       4a00, ff00, CF_ISA_A);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(halt,      4ac8, ffff, CF_ISA_A);
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    INSN(illegal,   4afc, ffff, CF_ISA_A);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    INSN(trap,      4e40, fff0, CF_ISA_A);
    INSN(link,      4e50, fff8, CF_ISA_A);
    INSN(unlk,      4e58, fff8, CF_ISA_A);
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(nop,       4e71, ffff, CF_ISA_A);
    INSN(stop,      4e72, ffff, CF_ISA_A);
    INSN(rte,       4e73, ffff, CF_ISA_A);
    INSN(rts,       4e75, ffff, CF_ISA_A);
    INSN(movec,     4e7b, ffff, CF_ISA_A);
    INSN(jump,      4e80, ffc0, CF_ISA_A);
    INSN(jump,      4ec0, ffc0, CF_ISA_A);
    INSN(addsubq,   5180, f1c0, CF_ISA_A);
    INSN(scc,       50c0, f0f8, CF_ISA_A);
    INSN(addsubq,   5080, f1c0, CF_ISA_A);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    INSN(branch,    6000, f000, CF_ISA_A);
    /* Disable long branch instructions, then add back the ones we want.  */
    INSN(undef,     60ff, f0ff, CF_ISA_A); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);

    INSN(moveq,     7000, f100, CF_ISA_A);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    INSN(or,        8000, f000, CF_ISA_A);
    INSN(divw,      80c0, f0c0, CF_ISA_A);
    INSN(addsub,    9000, f000, CF_ISA_A);
    INSN(subx,      9180, f1f8, CF_ISA_A);
    INSN(suba,      91c0, f1c0, CF_ISA_A);

    INSN(undef_mac, a000, f000, CF_ISA_A);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    INSN(and,       c000, f000, CF_ISA_A);
    INSN(mulw,      c0c0, f0c0, CF_ISA_A);
    INSN(addsub,    d000, f000, CF_ISA_A);
    INSN(addx,      d180, f1f8, CF_ISA_A);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
    INSN(undef_fpu, f000, f000, CF_ISA_A);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f340, ffc0, CF_FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#undef INSN
}

/* ??? Some of this implementation is not exception safe.  We should always
   write back the result to memory before setting the condition codes.  */
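/* Decode one instruction and dispatch it through the 64k-entry
   opcode_table built by register_m68k_insns().  */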
static void disas_m68k_insn(CPUState * env, DisasContext *s)
{
    uint16_t insn;

    insn = lduw_code(s->pc);
    s->pc += 2;

    opcode_table[insn](s, insn);
}

/* generate intermediate code for basic block 'tb'.  */
static inline int
gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
                               int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    uint16_t *gen_opc_end;
    int j, lj;
    target_ulong pc_start;
    int pc_offset;
    int last_cc_op;

    /* generate intermediate code */
    pc_start = tb->pc;

    dc->tb = tb;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->env = env;
    dc->is_jmp = DISAS_NEXT;
    dc->pc = pc_start;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->fpcr = env->fpcr;
    dc->user = (env->sr & SR_S) == 0;
    dc->is_mem = 0;
    dc->mactmp = NULL_QREG;
    lj = -1;
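    /* Translate one instruction per iteration until a jump is generated,
       the opcode buffer fills up, single-stepping is enabled, or pc_offset
       reaches TARGET_PAGE_SIZE - 32 (see the loop condition below).  */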
    do {
        pc_offset = dc->pc - pc_start;
        gen_throws_exception = NULL;
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    gen_exception(dc, dc->pc, EXCP_DEBUG);
                    dc->is_jmp = DISAS_JUMP;
                    break;
                }
            }
            if (dc->is_jmp)
                break;
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = dc->pc;
            gen_opc_instr_start[lj] = 1;
        }
        last_cc_op = dc->cc_op;
        dc->insn_pc = dc->pc;
        disas_m68k_insn(env, dc);

        /* Terminate the TB on memory ops if watchpoints are present.  */
        /* FIXME: This should be replaced by the deterministic execution
         * IRQ raising bits.  */
        if (dc->is_mem && env->nb_watchpoints)
            break;
    } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
             !env->singlestep_enabled &&
             (pc_offset) < (TARGET_PAGE_SIZE - 32));

    if (__builtin_expect(env->singlestep_enabled, 0)) {
        /* Make sure the pc is updated, and raise a debug exception.  */
        if (!dc->is_jmp) {
            gen_flush_cc_op(dc);
            tcg_gen_movi_i32(QREG_PC, dc->pc);
        }
        gen_helper_raise_exception(tcg_const_i32(EXCP_DEBUG));
    } else {
        switch(dc->is_jmp) {
        case DISAS_NEXT:
            gen_flush_cc_op(dc);
            gen_jmp_tb(dc, 0, dc->pc);
            break;
        default:
        case DISAS_JUMP:
        case DISAS_UPDATE:
            gen_flush_cc_op(dc);
            /* indicate that the hash table must be used to find the next TB */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* nothing more to generate */
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, dc->pc - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = dc->pc - pc_start;
    }

    //optimize_flags();
    //expand_target_qops();
    return 0;
}

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}

void cpu_dump_state(CPUState *env, FILE *f,
                    int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                    int flags)
{
    int i;
    uint16_t sr;
    CPU_DoubleU u;
    for (i = 0; i < 8; i++)
      {
        u.d = env->fregs[i];
        cpu_fprintf (f, "D%d = %08x   A%d = %08x   F%d = %08x%08x (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, u.l.upper, u.l.lower, *(double *)&u.d);
      }
    cpu_fprintf (f, "PC = %08x   ", env->pc);
    sr = env->sr;
    cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
    cpu_fprintf (f, "FPRESULT = %12g\n", *(double *)&env->fp_result);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}