root / target-sh4 / translate.c @ c047da1a

/*
 *  SH4 translation
 *
 *  Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <assert.h>

#define DEBUG_DISAS
#define SH4_DEBUG_DISAS
//#define SH4_SINGLE_STEP

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"
#include "qemu-common.h"

typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc;
    uint32_t sr;
    uint32_t fpscr;
    uint16_t opcode;
    uint32_t flags;
    int bstate;
    int memidx;
    uint32_t delayed_pc;
    int singlestep_enabled;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition
                      */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition     */
    BS_EXCP     = 3, /* We reached an exception condition */
};

/* global register indexes */
static TCGv cpu_env;
static TCGv cpu_gregs[24];
static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_flags;

/* internal register indexes */
static TCGv cpu_flags, cpu_delayed_pc;

/* dyngen register indexes */
static TCGv cpu_T[2];

#include "gen-icount.h"

static void sh4_translate_init(void)
{
    int i;
    static int done_init = 0;
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I32, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I32, TCG_AREG2, "T1");

    for (i = 0; i < 24; i++)
        cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                          offsetof(CPUState, gregs[i]),
                                          gregnames[i]);

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                offsetof(CPUState, pc), "PC");
    cpu_sr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                offsetof(CPUState, sr), "SR");
    cpu_ssr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                 offsetof(CPUState, ssr), "SSR");
    cpu_spc = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                 offsetof(CPUState, spc), "SPC");
    cpu_gbr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                 offsetof(CPUState, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                 offsetof(CPUState, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                 offsetof(CPUState, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                 offsetof(CPUState, dbr), "DBR");
    cpu_mach = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                  offsetof(CPUState, mach), "MACH");
    cpu_macl = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                  offsetof(CPUState, macl), "MACL");
    cpu_pr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                offsetof(CPUState, pr), "PR");
    cpu_fpscr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                   offsetof(CPUState, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                  offsetof(CPUState, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                   offsetof(CPUState, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                        offsetof(CPUState, delayed_pc),
                                        "_delayed_pc_");

    /* register helpers */
#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"

    done_init = 1;
}

#ifdef CONFIG_USER_ONLY

#define GEN_OP_LD(width, reg) \
  void gen_op_ld##width##_T0_##reg (DisasContext *ctx) { \
    gen_op_ld##width##_T0_##reg##_raw(); \
  }
#define GEN_OP_ST(width, reg) \
  void gen_op_st##width##_##reg##_T1 (DisasContext *ctx) { \
    gen_op_st##width##_##reg##_T1_raw(); \
  }

#else

#define GEN_OP_LD(width, reg) \
  void gen_op_ld##width##_T0_##reg (DisasContext *ctx) { \
    if (ctx->memidx) gen_op_ld##width##_T0_##reg##_kernel(); \
    else gen_op_ld##width##_T0_##reg##_user();\
  }
#define GEN_OP_ST(width, reg) \
  void gen_op_st##width##_##reg##_T1 (DisasContext *ctx) { \
    if (ctx->memidx) gen_op_st##width##_##reg##_T1_kernel(); \
    else gen_op_st##width##_##reg##_T1_user();\
  }

#endif

GEN_OP_LD(fl, FT0)
GEN_OP_ST(fl, FT0)
GEN_OP_LD(fq, DT0)
GEN_OP_ST(fq, DT0)

void cpu_dump_state(CPUState * env, FILE * f,
                    int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
                    int flags)
{
    int i;
    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                env->pc, env->sr, env->pr, env->fpscr);
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                env->spc, env->ssr, env->gbr, env->vbr);
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
    for (i = 0; i < 24; i += 4) {
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    if (env->flags & DELAY_SLOT) {
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    }
}

void cpu_sh4_reset(CPUSH4State * env)
{
#if defined(CONFIG_USER_ONLY)
    env->sr = SR_FD;            /* FD - kernel does lazy fpu context switch */
#else
    env->sr = 0x700000F0;        /* MD, RB, BL, I3-I0 */
#endif
    env->vbr = 0;
    env->pc = 0xA0000000;
#if defined(CONFIG_USER_ONLY)
    env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
    set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
#else
    env->fpscr = 0x00040001; /* CPU reset value according to SH4 manual */
    set_float_rounding_mode(float_round_to_zero, &env->fp_status);
#endif
    env->mmucr = 0;
}

CPUSH4State *cpu_sh4_init(const char *cpu_model)
{
    CPUSH4State *env;

    env = qemu_mallocz(sizeof(CPUSH4State));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    sh4_translate_init();
    cpu_sh4_reset(env);
    tlb_flush(env, 1);
    return env;
}
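
/* Jump to a fixed destination, chaining the translation blocks directly
 * when the target lies in the same guest page and single-stepping is off;
 * otherwise reload cpu_pc and leave through tcg_gen_exit_tb(0).
 */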

static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        !ctx->singlestep_enabled) {
        /* Use a direct jump if in same page and singlestep not enabled */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb((long) tb + n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->singlestep_enabled)
            tcg_gen_helper_0_0(helper_debug);
        tcg_gen_exit_tb(0);
    }
}

static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == (uint32_t) - 1) {
        /* Target is not statically known; it necessarily comes from a
           delayed jump, as immediate jumps are conditional jumps */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        if (ctx->singlestep_enabled)
            tcg_gen_helper_0_0(helper_debug);
        tcg_gen_exit_tb(0);
    } else {
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}
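
/* Conditional delayed branches (bt/s, bf/s) record their target in
 * cpu_delayed_pc and set the DELAY_SLOT_TRUE flag when the branch will be
 * taken; gen_delayed_conditional_jump() then jumps to cpu_delayed_pc only
 * if that flag is set, and clears it again.
 */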

static inline void gen_branch_slot(uint32_t delayed_pc, int t)
{
    int label = gen_new_label();
    tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
    tcg_gen_andi_i32(cpu_T[0], cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_NE, cpu_T[0], t ? SR_T : 0, label);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    gen_set_label(label);
}

/* Immediate conditional jump (bt or bf) */
static void gen_conditional_jump(DisasContext * ctx,
                                 target_ulong ift, target_ulong ifnott)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_andi_i32(cpu_T[0], cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_T[0], SR_T, l1);
    gen_goto_tb(ctx, 0, ifnott);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ift);
}

/* Delayed conditional jump (bt or bf) */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_andi_i32(cpu_T[0], cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_T[0], DELAY_SLOT_TRUE, l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    gen_set_label(l1);
    tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
    gen_jump(ctx);
}

static inline void gen_set_t(void)
{
    tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
}

static inline void gen_clr_t(void)
{
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
}

static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
{
    int label1 = gen_new_label();
    int label2 = gen_new_label();
    tcg_gen_brcond_i32(cond, t1, t0, label1);
    gen_clr_t();
    tcg_gen_br(label2);
    gen_set_label(label1);
    gen_set_t();
    gen_set_label(label2);
}

static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
{
    int label1 = gen_new_label();
    int label2 = gen_new_label();
    tcg_gen_brcondi_i32(cond, t0, imm, label1);
    gen_clr_t();
    tcg_gen_br(label2);
    gen_set_label(label1);
    gen_set_t();
    gen_set_label(label2);
}

static inline void gen_store_flags(uint32_t flags)
{
    tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
}
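
/* The B*_* macros below extract fields of the 16-bit opcode in ctx->opcode
 * (B11_8 is usually Rn, B7_4 usually Rm).  REG() resolves R0-R7 to the
 * register bank selected by SR.MD/SR.RB, ALTREG() to the other bank, and
 * FREG()/XREG()/DREG() pick the FPU register bank according to FPSCR.FR.
 */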

#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
                (x) + 16 : (x))

#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
                ? (x) + 16 : (x))

#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */

#define CHECK_NOT_DELAY_SLOT \
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) \
  {tcg_gen_helper_0_0(helper_raise_slot_illegal_instruction); ctx->bstate = BS_EXCP; \
   return;}
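
/* Opcodes are decoded by a sequence of switches on progressively narrower
 * masks of ctx->opcode: exact 16-bit values first, then the 0xf000, 0xf00f,
 * 0xff00, 0xf08f and 0xf0ff groups.  Each case emits the TCG ops for one
 * instruction and returns.
 */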

void _decode_opc(DisasContext * ctx)
{
#if 0
    fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
#endif
    switch (ctx->opcode) {
    case 0x0019:                /* div0u */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
        return;
    case 0x000b:                /* rts */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0028:                /* clrmac */
        tcg_gen_movi_i32(cpu_mach, 0);
        tcg_gen_movi_i32(cpu_macl, 0);
        return;
    case 0x0048:                /* clrs */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
        return;
    case 0x0008:                /* clrt */
        gen_clr_t();
        return;
    case 0x0038:                /* ldtlb */
#if defined(CONFIG_USER_ONLY)
        assert(0);                /* XXXXX */
#else
        tcg_gen_helper_0_0(helper_ldtlb);
#endif
        return;
    case 0x002b:                /* rte */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_sr, cpu_ssr);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0058:                /* sets */
        tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
        return;
    case 0x0018:                /* sett */
        gen_set_t();
        return;
    case 0xfbfd:                /* frchg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
        ctx->bstate = BS_STOP;
        return;
    case 0xf3fd:                /* fschg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
        ctx->bstate = BS_STOP;
        return;
    case 0x0009:                /* nop */
        return;
    case 0x001b:                /* sleep */
        if (ctx->memidx) {
                tcg_gen_helper_0_0(helper_sleep);
        } else {
                tcg_gen_helper_0_0(helper_raise_illegal_instruction);
                ctx->bstate = BS_EXCP;
        }
        return;
    }

    switch (ctx->opcode & 0xf000) {
    case 0x1000:                /* mov.l Rm,@(disp,Rn) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_addi_i32(cpu_T[1], cpu_T[1], B3_0 * 4);
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x5000:                /* mov.l @(disp,Rm),Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_addi_i32(cpu_T[0], cpu_T[0], B3_0 * 4);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0xe000:                /* mov #imm,Rn */
        tcg_gen_movi_i32(cpu_gregs[REG(B11_8)], B7_0s);
        return;
    case 0x9000:                /* mov.w @(disp,PC),Rn */
        tcg_gen_movi_i32(cpu_T[0], ctx->pc + 4 + B7_0 * 2);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0xd000:                /* mov.l @(disp,PC),Rn */
        tcg_gen_movi_i32(cpu_T[0], (ctx->pc + 4 + B7_0 * 4) & ~3);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x7000:                /* add #imm,Rn */
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], B7_0s);
        return;
    case 0xa000:                /* bra disp */
        CHECK_NOT_DELAY_SLOT
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
        ctx->flags |= DELAY_SLOT;
        return;
    case 0xb000:                /* bsr disp */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
        ctx->flags |= DELAY_SLOT;
        return;
    }

    switch (ctx->opcode & 0xf00f) {
    case 0x6003:                /* mov Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x2000:                /* mov.b Rm,@Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x2001:                /* mov.w Rm,@Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_st16(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x2002:                /* mov.l Rm,@Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x6000:                /* mov.b @Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x6001:                /* mov.w @Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x6002:                /* mov.l @Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x2004:                /* mov.b Rm,@-Rn */
        tcg_gen_subi_i32(cpu_T[1], cpu_gregs[REG(B11_8)], 1);
        tcg_gen_qemu_st8(cpu_gregs[REG(B7_4)], cpu_T[1], ctx->memidx);        /* might cause re-execution */
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)],
                         cpu_gregs[REG(B11_8)], 1);                        /* modify register status */
        return;
    case 0x2005:                /* mov.w Rm,@-Rn */
        tcg_gen_subi_i32(cpu_T[1], cpu_gregs[REG(B11_8)], 2);
        tcg_gen_qemu_st16(cpu_gregs[REG(B7_4)], cpu_T[1], ctx->memidx);
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)],
                         cpu_gregs[REG(B11_8)], 2);
        return;
    case 0x2006:                /* mov.l Rm,@-Rn */
        tcg_gen_subi_i32(cpu_T[1], cpu_gregs[REG(B11_8)], 4);
        tcg_gen_qemu_st32(cpu_gregs[REG(B7_4)], cpu_T[1], ctx->memidx);
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)],
                         cpu_gregs[REG(B11_8)], 4);
        return;
    case 0x6004:                /* mov.b @Rm+,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        if ( B11_8 != B7_4 )
                tcg_gen_addi_i32(cpu_gregs[REG(B7_4)],
                                 cpu_gregs[REG(B7_4)], 1);
        return;
    case 0x6005:                /* mov.w @Rm+,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        if ( B11_8 != B7_4 )
                tcg_gen_addi_i32(cpu_gregs[REG(B7_4)],
                                 cpu_gregs[REG(B7_4)], 2);
        return;
    case 0x6006:                /* mov.l @Rm+,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        if ( B11_8 != B7_4 )
                tcg_gen_addi_i32(cpu_gregs[REG(B7_4)],
                                 cpu_gregs[REG(B7_4)], 4);
        return;
    case 0x0004:                /* mov.b Rm,@(R0,Rn) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_add_i32(cpu_T[1], cpu_T[1], cpu_gregs[REG(0)]);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x0005:                /* mov.w Rm,@(R0,Rn) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_add_i32(cpu_T[1], cpu_T[1], cpu_gregs[REG(0)]);
        tcg_gen_qemu_st16(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x0006:                /* mov.l Rm,@(R0,Rn) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_add_i32(cpu_T[1], cpu_T[1], cpu_gregs[REG(0)]);
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x000c:                /* mov.b @(R0,Rm),Rn */
        tcg_gen_add_i32(cpu_T[0], cpu_gregs[REG(B7_4)], cpu_gregs[REG(0)]);
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x000d:                /* mov.w @(R0,Rm),Rn */
        tcg_gen_add_i32(cpu_T[0], cpu_gregs[REG(B7_4)], cpu_gregs[REG(0)]);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x000e:                /* mov.l @(R0,Rm),Rn */
        tcg_gen_add_i32(cpu_T[0], cpu_gregs[REG(B7_4)], cpu_gregs[REG(0)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x6008:                /* swap.b Rm,Rn */
        tcg_gen_andi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)], 0xffff0000);
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(B7_4)], 0xff);
        tcg_gen_shli_i32(cpu_T[0], cpu_T[0], 8);
        tcg_gen_or_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_T[0]);
        tcg_gen_shri_i32(cpu_T[0], cpu_gregs[REG(B7_4)], 8);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], 0xff);
        tcg_gen_or_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x6009:                /* swap.w Rm,Rn */
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(B7_4)], 0xffff);
        tcg_gen_shli_i32(cpu_T[0], cpu_T[0], 16);
        tcg_gen_shri_i32(cpu_T[1], cpu_gregs[REG(B7_4)], 16);
        tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0xffff);
        tcg_gen_or_i32(cpu_gregs[REG(B11_8)], cpu_T[0], cpu_T[1]);
        return;
    case 0x200d:                /* xtrct Rm,Rn */
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(B7_4)], 0xffff);
        tcg_gen_shli_i32(cpu_T[0], cpu_T[0], 16);
        tcg_gen_shri_i32(cpu_T[1], cpu_gregs[REG(B11_8)], 16);
        tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0xffff);
        tcg_gen_or_i32(cpu_gregs[REG(B11_8)], cpu_T[0], cpu_T[1]);
        return;
    case 0x300c:                /* add Rm,Rn */
        tcg_gen_add_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x300e:                /* addc Rm,Rn */
        tcg_gen_helper_1_2(helper_addc, cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)], cpu_gregs[REG(B11_8)]);
        return;
    case 0x300f:                /* addv Rm,Rn */
        tcg_gen_helper_1_2(helper_addv, cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)], cpu_gregs[REG(B11_8)]);
        return;
    case 0x2009:                /* and Rm,Rn */
        tcg_gen_and_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x3000:                /* cmp/eq Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_cmp(TCG_COND_EQ, cpu_T[0], cpu_T[1]);
        return;
    case 0x3003:                /* cmp/ge Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_cmp(TCG_COND_GE, cpu_T[0], cpu_T[1]);
        return;
    case 0x3007:                /* cmp/gt Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_cmp(TCG_COND_GT, cpu_T[0], cpu_T[1]);
        return;
    case 0x3006:                /* cmp/hi Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_cmp(TCG_COND_GTU, cpu_T[0], cpu_T[1]);
        return;
    case 0x3002:                /* cmp/hs Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_cmp(TCG_COND_GEU, cpu_T[0], cpu_T[1]);
        return;
    case 0x200c:                /* cmp/str Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_op_cmp_str_T0_T1();
        return;
    case 0x2007:                /* div0s Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_op_div0s_T0_T1();
        return;
    case 0x3004:                /* div1 Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_op_div1_T0_T1();
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[1]);
        return;
    case 0x300d:                /* dmuls.l Rm,Rn */
        {
            TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
            TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);

            tcg_gen_ext_i32_i64(tmp1, cpu_gregs[REG(B7_4)]);
            tcg_gen_ext_i32_i64(tmp2, cpu_gregs[REG(B11_8)]);
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
            tcg_gen_shri_i64(tmp1, tmp1, 32);
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);

            tcg_temp_free(tmp1);
            tcg_temp_free(tmp2);
        }
        return;
    case 0x3005:                /* dmulu.l Rm,Rn */
        {
            TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
            TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);

            tcg_gen_extu_i32_i64(tmp1, cpu_gregs[REG(B7_4)]);
            tcg_gen_extu_i32_i64(tmp2, cpu_gregs[REG(B11_8)]);
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
            tcg_gen_shri_i64(tmp1, tmp1, 32);
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);

            tcg_temp_free(tmp1);
            tcg_temp_free(tmp2);
        }
        return;
    case 0x600e:                /* exts.b Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], 0xff);
        tcg_gen_ext8s_i32(cpu_T[0], cpu_T[0]);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x600f:                /* exts.w Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], 0xffff);
        tcg_gen_ext16s_i32(cpu_T[0], cpu_T[0]);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x600c:                /* extu.b Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], 0xff);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x600d:                /* extu.w Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], 0xffff);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x000f:                /* mac.l @Rm+,@Rn+ */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_helper_0_2(helper_macl, cpu_T[0], cpu_T[1]);
        tcg_gen_addi_i32(cpu_gregs[REG(B7_4)], cpu_gregs[REG(B7_4)], 4);
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        return;
    case 0x400f:                /* mac.w @Rm+,@Rn+ */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_helper_0_2(helper_macw, cpu_T[0], cpu_T[1]);
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 2);
        tcg_gen_addi_i32(cpu_gregs[REG(B7_4)], cpu_gregs[REG(B7_4)], 2);
        return;
    case 0x0007:                /* mul.l Rm,Rn */
        tcg_gen_mul_i32(cpu_macl, cpu_gregs[REG(B7_4)], cpu_gregs[REG(B11_8)]);
        return;
    case 0x200f:                /* muls.w Rm,Rn */
        tcg_gen_ext16s_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_ext16s_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_mul_i32(cpu_macl, cpu_T[0], cpu_T[1]);
        return;
    case 0x200e:                /* mulu.w Rm,Rn */
        tcg_gen_ext16u_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_ext16u_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_mul_i32(cpu_macl, cpu_T[0], cpu_T[1]);
        return;
    case 0x600b:                /* neg Rm,Rn */
        tcg_gen_neg_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x600a:                /* negc Rm,Rn */
        tcg_gen_helper_1_1(helper_negc, cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x6007:                /* not Rm,Rn */
        tcg_gen_not_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x200b:                /* or Rm,Rn */
        tcg_gen_or_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x400c:                /* shad Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_op_shad_T0_T1();
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[1]);
        return;
    case 0x400d:                /* shld Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        gen_op_shld_T0_T1();
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[1]);
        return;
    case 0x3008:                /* sub Rm,Rn */
        tcg_gen_sub_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0x300a:                /* subc Rm,Rn */
        tcg_gen_helper_1_2(helper_subc, cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)], cpu_gregs[REG(B11_8)]);
        return;
    case 0x300b:                /* subv Rm,Rn */
        tcg_gen_helper_1_2(helper_subv, cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)], cpu_gregs[REG(B11_8)]);
        return;
    case 0x2008:                /* tst Rm,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_and_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
        gen_cmp_imm(TCG_COND_EQ, cpu_T[0], 0);
        return;
    case 0x200a:                /* xor Rm,Rn */
        tcg_gen_xor_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], cpu_gregs[REG(B7_4)]);
        return;
    case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            gen_op_fmov_drN_DT0(XREG(B7_4));
            gen_op_fmov_DT0_drN(XREG(B11_8));
        } else {
            gen_op_fmov_frN_FT0(FREG(B7_4));
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            gen_op_fmov_drN_DT0(XREG(B7_4));
            tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
            gen_op_stfq_DT0_T1(ctx);
        } else {
            gen_op_fmov_frN_FT0(FREG(B7_4));
            tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
            gen_op_stfl_FT0_T1(ctx);
        }
        return;
    case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
            gen_op_ldfq_T0_DT0(ctx);
            gen_op_fmov_DT0_drN(XREG(B11_8));
        } else {
            tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
            gen_op_ldfl_T0_FT0(ctx);
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
            gen_op_ldfq_T0_DT0(ctx);
            gen_op_fmov_DT0_drN(XREG(B11_8));
            tcg_gen_addi_i32(cpu_gregs[REG(B7_4)],
                             cpu_gregs[REG(B7_4)], 8);
        } else {
            tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
            gen_op_ldfl_T0_FT0(ctx);
            gen_op_fmov_FT0_frN(FREG(B11_8));
            tcg_gen_addi_i32(cpu_gregs[REG(B7_4)],
                             cpu_gregs[REG(B7_4)], 4);
        }
        return;
    case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 8);
            gen_op_fmov_drN_DT0(XREG(B7_4));
            tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
            tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 8);
            gen_op_stfq_DT0_T1(ctx);
            tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 8);
        } else {
            tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
            gen_op_fmov_frN_FT0(FREG(B7_4));
            tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
            tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
            gen_op_stfl_FT0_T1(ctx);
            tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        }
        return;
    case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rn - FPSCR: Nothing */
        tcg_gen_add_i32(cpu_T[0], cpu_gregs[REG(B7_4)], cpu_gregs[REG(0)]);
        if (ctx->fpscr & FPSCR_SZ) {
            gen_op_ldfq_T0_DT0(ctx);
            gen_op_fmov_DT0_drN(XREG(B11_8));
        } else {
            gen_op_ldfl_T0_FT0(ctx);
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            gen_op_fmov_drN_DT0(XREG(B7_4));
            tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
            tcg_gen_add_i32(cpu_T[1], cpu_T[1], cpu_gregs[REG(0)]);
            gen_op_stfq_DT0_T1(ctx);
        } else {
            gen_op_fmov_frN_FT0(FREG(B7_4));
            tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
            tcg_gen_add_i32(cpu_T[1], cpu_T[1], cpu_gregs[REG(0)]);
            gen_op_stfl_FT0_T1(ctx);
        }
        return;
    case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
    case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0110)
                break; /* illegal instruction */
            gen_op_fmov_drN_DT1(DREG(B7_4));
            gen_op_fmov_drN_DT0(DREG(B11_8));
        }
        else {
            gen_op_fmov_frN_FT1(FREG(B7_4));
            gen_op_fmov_frN_FT0(FREG(B11_8));
        }

        switch (ctx->opcode & 0xf00f) {
        case 0xf000:                /* fadd Rm,Rn */
            ctx->fpscr & FPSCR_PR ? gen_op_fadd_DT() : gen_op_fadd_FT();
            break;
        case 0xf001:                /* fsub Rm,Rn */
            ctx->fpscr & FPSCR_PR ? gen_op_fsub_DT() : gen_op_fsub_FT();
            break;
        case 0xf002:                /* fmul Rm,Rn */
            ctx->fpscr & FPSCR_PR ? gen_op_fmul_DT() : gen_op_fmul_FT();
            break;
        case 0xf003:                /* fdiv Rm,Rn */
            ctx->fpscr & FPSCR_PR ? gen_op_fdiv_DT() : gen_op_fdiv_FT();
            break;
        case 0xf004:                /* fcmp/eq Rm,Rn */
            ctx->fpscr & FPSCR_PR ? gen_op_fcmp_eq_DT() : gen_op_fcmp_eq_FT();
            return;
        case 0xf005:                /* fcmp/gt Rm,Rn */
            ctx->fpscr & FPSCR_PR ? gen_op_fcmp_gt_DT() : gen_op_fcmp_gt_FT();
            return;
        }

        if (ctx->fpscr & FPSCR_PR) {
            gen_op_fmov_DT0_drN(DREG(B11_8));
        }
        else {
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    }

    switch (ctx->opcode & 0xff00) {
    case 0xc900:                /* and #imm,R0 */
        tcg_gen_andi_i32(cpu_gregs[REG(0)], cpu_gregs[REG(0)], B7_0);
        return;
    case 0xcd00:                /* and.b #imm,@(R0,GBR) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_add_i32(cpu_T[0], cpu_T[0], cpu_gbr);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], B7_0);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x8b00:                /* bf label */
        CHECK_NOT_DELAY_SLOT
            gen_conditional_jump(ctx, ctx->pc + 2,
                                 ctx->pc + 4 + B7_0s * 2);
        ctx->bstate = BS_BRANCH;
        return;
    case 0x8f00:                /* bf/s label */
        CHECK_NOT_DELAY_SLOT
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
        return;
    case 0x8900:                /* bt label */
        CHECK_NOT_DELAY_SLOT
            gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
                                 ctx->pc + 2);
        ctx->bstate = BS_BRANCH;
        return;
    case 0x8d00:                /* bt/s label */
        CHECK_NOT_DELAY_SLOT
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
        return;
    case 0x8800:                /* cmp/eq #imm,R0 */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        gen_cmp_imm(TCG_COND_EQ, cpu_T[0], B7_0s);
        return;
    case 0xc400:                /* mov.b @(disp,GBR),R0 */
        tcg_gen_addi_i32(cpu_T[0], cpu_gbr, B7_0);
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(0)], cpu_T[0]);
        return;
    case 0xc500:                /* mov.w @(disp,GBR),R0 */
        tcg_gen_addi_i32(cpu_T[0], cpu_gbr, B7_0 * 2);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(0)], cpu_T[0]);
        return;
    case 0xc600:                /* mov.l @(disp,GBR),R0 */
        tcg_gen_addi_i32(cpu_T[0], cpu_gbr, B7_0 * 4);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(0)], cpu_T[0]);
        return;
    case 0xc000:                /* mov.b R0,@(disp,GBR) */
        tcg_gen_addi_i32(cpu_T[0], cpu_gbr, B7_0);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0xc100:                /* mov.w R0,@(disp,GBR) */
        tcg_gen_addi_i32(cpu_T[0], cpu_gbr, B7_0 * 2);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_qemu_st16(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0xc200:                /* mov.l R0,@(disp,GBR) */
        tcg_gen_addi_i32(cpu_T[0], cpu_gbr, B7_0 * 4);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x8000:                /* mov.b R0,@(disp,Rn) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B7_4)]);
        tcg_gen_addi_i32(cpu_T[1], cpu_T[1], B3_0);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x8100:                /* mov.w R0,@(disp,Rn) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B7_4)]);
        tcg_gen_addi_i32(cpu_T[1], cpu_T[1], B3_0 * 2);
        tcg_gen_qemu_st16(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x8400:                /* mov.b @(disp,Rn),R0 */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_addi_i32(cpu_T[0], cpu_T[0], B3_0);
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(0)], cpu_T[0]);
        return;
    case 0x8500:                /* mov.w @(disp,Rn),R0 */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B7_4)]);
        tcg_gen_addi_i32(cpu_T[0], cpu_T[0], B3_0 * 2);
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[REG(0)], cpu_T[0]);
        return;
    case 0xc700:                /* mova @(disp,PC),R0 */
        tcg_gen_movi_i32(cpu_gregs[REG(0)],
                         ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
        return;
    case 0xcb00:                /* or #imm,R0 */
        tcg_gen_ori_i32(cpu_gregs[REG(0)], cpu_gregs[REG(0)], B7_0);
        return;
    case 0xcf00:                /* or.b #imm,@(R0,GBR) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_add_i32(cpu_T[0], cpu_T[0], cpu_gbr);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_ori_i32(cpu_T[0], cpu_T[0], B7_0);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0xc300:                /* trapa #imm */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pc, ctx->pc);
        tcg_gen_movi_i32(cpu_T[0], B7_0);
        tcg_gen_helper_0_1(helper_trapa, cpu_T[0]);
        ctx->bstate = BS_BRANCH;
        return;
    case 0xc800:                /* tst #imm,R0 */
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(0)], B7_0);
        gen_cmp_imm(TCG_COND_EQ, cpu_T[0], 0);
        return;
    case 0xcc00:                /* tst.b #imm,@(R0,GBR) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_add_i32(cpu_T[0], cpu_T[0], cpu_gbr);
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], B7_0);
        gen_cmp_imm(TCG_COND_EQ, cpu_T[0], 0);
        return;
    case 0xca00:                /* xor #imm,R0 */
        tcg_gen_xori_i32(cpu_gregs[REG(0)], cpu_gregs[REG(0)], B7_0);
        return;
    case 0xce00:                /* xor.b #imm,@(R0,GBR) */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_add_i32(cpu_T[0], cpu_T[0], cpu_gbr);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_xori_i32(cpu_T[0], cpu_T[0], B7_0);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    }

    switch (ctx->opcode & 0xf08f) {
    case 0x408e:                /* ldc Rm,Rn_BANK */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_mov_i32(cpu_gregs[ALTREG(B6_4)], cpu_T[0]);
        return;
    case 0x4087:                /* ldc.l @Rm+,Rn_BANK */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        tcg_gen_mov_i32(cpu_gregs[ALTREG(B6_4)], cpu_T[0]);
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        return;
    case 0x0082:                /* stc Rm_BANK,Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[ALTREG(B6_4)]);
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
        return;
    case 0x4083:                /* stc.l Rm_BANK,@-Rn */
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[ALTREG(B6_4)]);
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        return;
    }

    switch (ctx->opcode & 0xf0ff) {
    case 0x0023:                /* braf Rn */
        CHECK_NOT_DELAY_SLOT tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_addi_i32(cpu_delayed_pc, cpu_T[0], ctx->pc + 4);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0003:                /* bsrf Rn */
        CHECK_NOT_DELAY_SLOT tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_add_i32(cpu_delayed_pc, cpu_T[0], cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x4015:                /* cmp/pl Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        gen_cmp_imm(TCG_COND_GT, cpu_T[0], 0);
        return;
    case 0x4011:                /* cmp/pz Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        gen_cmp_imm(TCG_COND_GE, cpu_T[0], 0);
        return;
    case 0x4010:                /* dt Rn */
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 1);
        gen_cmp_imm(TCG_COND_EQ, cpu_gregs[REG(B11_8)], 0);
        return;
    case 0x402b:                /* jmp @Rn */
        CHECK_NOT_DELAY_SLOT tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_T[0]);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x400b:                /* jsr @Rn */
        CHECK_NOT_DELAY_SLOT tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_T[0]);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x400e:                /* ldc Rm,SR */
        tcg_gen_andi_i32(cpu_sr, cpu_gregs[REG(B11_8)], 0x700083f3);
        ctx->bstate = BS_STOP;
        return;
    case 0x4007:                /* ldc.l @Rm+,SR */
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_gregs[REG(B11_8)], ctx->memidx);
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        tcg_gen_andi_i32(cpu_sr, cpu_T[0], 0x700083f3);
        ctx->bstate = BS_STOP;
        return;
    case 0x0002:                /* stc SR,Rn */
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_sr);
        return;
    case 0x4003:                /* stc.l SR,@-Rn */
        tcg_gen_subi_i32(cpu_T[0], cpu_gregs[REG(B11_8)], 4);
        tcg_gen_qemu_st32(cpu_sr, cpu_T[0], ctx->memidx);
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
        return;
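/* LDST() expands into the four access forms for one control/system
 * register: load from Rm, load from @Rm+, store to Rn, and store to @-Rn,
 * with the opcode of each form passed explicitly.
 */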
1146
#define LDST(reg,ldnum,ldpnum,stnum,stpnum)                        \
1147
  case ldnum:                                                        \
1148
    tcg_gen_mov_i32 (cpu_##reg, cpu_gregs[REG(B11_8)]);                \
1149
    return;                                                        \
1150
  case ldpnum:                                                        \
1151
    tcg_gen_qemu_ld32s (cpu_##reg, cpu_gregs[REG(B11_8)], ctx->memidx);        \
1152
    tcg_gen_addi_i32(cpu_gregs[REG(B11_8)],                         \
1153
                     cpu_gregs[REG(B11_8)], 4);                        \
1154
    return;                                                        \
1155
  case stnum:                                                        \
1156
    tcg_gen_mov_i32 (cpu_gregs[REG(B11_8)], cpu_##reg);                \
1157
    return;                                                        \
1158
  case stpnum:                                                        \
1159
    tcg_gen_subi_i32(cpu_T[1], cpu_gregs[REG(B11_8)], 4);        \
1160
    tcg_gen_qemu_st32 (cpu_##reg, cpu_T[1], ctx->memidx);        \
1161
    tcg_gen_subi_i32(cpu_gregs[REG(B11_8)],                         \
1162
                     cpu_gregs[REG(B11_8)], 4);                        \
1163
    return;
1164
        LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013)
1165
        LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023)
1166
        LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033)
1167
        LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043)
1168
        LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2)
1169
        LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002)
1170
        LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012)
1171
        LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022)
1172
        LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052)
1173
    case 0x406a:                /* lds Rm,FPSCR */
1174
        tcg_gen_helper_0_1(helper_ld_fpscr, cpu_gregs[REG(B11_8)]);
1175
        ctx->bstate = BS_STOP;
1176
        return;
1177
    case 0x4066:                /* lds.l @Rm+,FPSCR */
1178
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_gregs[REG(B11_8)], ctx->memidx);
1179
        tcg_gen_addi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
1180
        tcg_gen_helper_0_1(helper_ld_fpscr, cpu_T[0]);
1181
        ctx->bstate = BS_STOP;
1182
        return;
1183
    case 0x006a:                /* sts FPSCR,Rn */
1184
        tcg_gen_andi_i32(cpu_T[0], cpu_fpscr, 0x003fffff);
1185
        tcg_gen_mov_i32(cpu_gregs[REG(B11_8)], cpu_T[0]);
1186
        return;
1187
    case 0x4062:                /* sts FPSCR,@-Rn */
1188
        tcg_gen_andi_i32(cpu_T[0], cpu_fpscr, 0x003fffff);
1189
        tcg_gen_subi_i32(cpu_T[1], cpu_gregs[REG(B11_8)], 4);
1190
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
1191
        tcg_gen_subi_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 4);
1192
        return;
1193
    case 0x00c3:                /* movca.l R0,@Rm */
1194
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(0)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_st32(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
    case 0x0029:                /* movt Rn */
        tcg_gen_andi_i32(cpu_gregs[REG(B11_8)], cpu_sr, SR_T);
        return;
    case 0x0093:                /* ocbi @Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        return;
    case 0x00a3:                /* ocbp @Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        return;
    case 0x00b3:                /* ocbwb @Rn */
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_T[0], ctx->memidx);
        return;
    case 0x0083:                /* pref @Rn */
        return;
    case 0x4024:                /* rotcl Rn */
        gen_op_rotcl_Rn(REG(B11_8));
        return;
    case 0x4025:                /* rotcr Rn */
        gen_op_rotcr_Rn(REG(B11_8));
        return;
    case 0x4004:                /* rotl Rn */
        gen_op_rotl_Rn(REG(B11_8));
        return;
    case 0x4005:                /* rotr Rn */
        gen_op_rotr_Rn(REG(B11_8));
        return;
    case 0x4000:                /* shll Rn */
    case 0x4020:                /* shal Rn */
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(B11_8)], 0x80000000);
        gen_cmp_imm(TCG_COND_NE, cpu_T[0], 0);
        tcg_gen_shli_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 1);
        return;
    case 0x4021:                /* shar Rn */
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(B11_8)], 1);
        gen_cmp_imm(TCG_COND_NE, cpu_T[0], 0);
        tcg_gen_sari_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 1);
        return;
    case 0x4001:                /* shlr Rn */
        tcg_gen_andi_i32(cpu_T[0], cpu_gregs[REG(B11_8)], 1);
        gen_cmp_imm(TCG_COND_NE, cpu_T[0], 0);
        tcg_gen_shri_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 1);
        return;
    case 0x4008:                /* shll2 Rn */
        tcg_gen_shli_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 2);
        return;
    case 0x4018:                /* shll8 Rn */
        tcg_gen_shli_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 8);
        return;
    case 0x4028:                /* shll16 Rn */
        tcg_gen_shli_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 16);
        return;
    case 0x4009:                /* shlr2 Rn */
        tcg_gen_shri_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 2);
        return;
    case 0x4019:                /* shlr8 Rn */
        tcg_gen_shri_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 8);
        return;
    case 0x4029:                /* shlr16 Rn */
        tcg_gen_shri_i32(cpu_gregs[REG(B11_8)], cpu_gregs[REG(B11_8)], 16);
        return;
    case 0x401b:                /* tas.b @Rn */
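        /* Note: the byte load/store pair below is emitted as two separate
           memory accesses, not as a locked read-modify-write sequence. */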
        tcg_gen_mov_i32(cpu_T[0], cpu_gregs[REG(B11_8)]);
        tcg_gen_mov_i32(cpu_T[1], cpu_T[0]);
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_T[0], ctx->memidx);
        gen_cmp_imm(TCG_COND_EQ, cpu_T[0], 0);
        tcg_gen_ori_i32(cpu_T[0], cpu_T[0], 0x80);
        tcg_gen_qemu_st8(cpu_T[0], cpu_T[1], ctx->memidx);
        return;
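    /* Floating-point instructions below: the FPSCR.PR bit captured in
       ctx->fpscr at translation time selects single or double precision. */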
    case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
        gen_op_movl_fpul_FT0();
        gen_op_fmov_FT0_frN(FREG(B11_8));
        return;
    case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
        gen_op_fmov_frN_FT0(FREG(B11_8));
        gen_op_movl_FT0_fpul();
        return;
    case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            gen_op_float_DT();
            gen_op_fmov_DT0_drN(DREG(B11_8));
        }
        else {
            gen_op_float_FT();
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            gen_op_fmov_drN_DT0(DREG(B11_8));
            gen_op_ftrc_DT();
        }
        else {
            gen_op_fmov_frN_FT0(FREG(B11_8));
            gen_op_ftrc_FT();
        }
        return;
    case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
        gen_op_fneg_frN(FREG(B11_8));
        return;
    case 0xf05d: /* fabs FRn/DRn */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            gen_op_fmov_drN_DT0(DREG(B11_8));
            gen_op_fabs_DT();
            gen_op_fmov_DT0_drN(DREG(B11_8));
        } else {
            gen_op_fmov_frN_FT0(FREG(B11_8));
            gen_op_fabs_FT();
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    case 0xf06d: /* fsqrt FRn */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            gen_op_fmov_drN_DT0(DREG(B11_8));
            gen_op_fsqrt_DT();
            gen_op_fmov_DT0_drN(DREG(B11_8));
        } else {
            gen_op_fmov_frN_FT0(FREG(B11_8));
            gen_op_fsqrt_FT();
            gen_op_fmov_FT0_frN(FREG(B11_8));
        }
        return;
    case 0xf07d: /* fsrra FRn */
        break;
    case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_T[0], 0);
            gen_op_fmov_T0_frN(FREG(B11_8));
            return;
        }
        break;
    case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_T[0], 0x3f800000);
            gen_op_fmov_T0_frN(FREG(B11_8));
            return;
        }
        break;
    case 0xf0ad: /* fcnvsd FPUL,DRn */
        gen_op_movl_fpul_FT0();
        gen_op_fcnvsd_FT_DT();
        gen_op_fmov_DT0_drN(DREG(B11_8));
        return;
    case 0xf0bd: /* fcnvds DRn,FPUL */
        gen_op_fmov_drN_DT0(DREG(B11_8));
        gen_op_fcnvds_DT_FT();
        gen_op_movl_FT0_fpul();
        return;
    }

    fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
            ctx->opcode, ctx->pc);
    tcg_gen_helper_0_0(helper_raise_illegal_instruction);
    ctx->bstate = BS_EXCP;
}

void decode_opc(DisasContext * ctx)
{
    uint32_t old_flags = ctx->flags;

    _decode_opc(ctx);

    if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        if (ctx->flags & DELAY_SLOT_CLEARME) {
            gen_store_flags(0);
        } else {
            /* go out of the delay slot */
            uint32_t new_flags = ctx->flags;
            new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
            gen_store_flags(new_flags);
        }
        ctx->flags = 0;
        ctx->bstate = BS_BRANCH;
        if (old_flags & DELAY_SLOT_CONDITIONAL) {
            gen_delayed_conditional_jump(ctx);
        } else if (old_flags & DELAY_SLOT) {
            gen_jump(ctx);
        }

    }

    /* go into a delay slot */
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
        gen_store_flags(ctx->flags);
}

static inline void
gen_intermediate_code_internal(CPUState * env, TranslationBlock * tb,
                               int search_pc)
{
    DisasContext ctx;
    target_ulong pc_start;
    static uint16_t *gen_opc_end;
    int i, ii;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.flags = (uint32_t)tb->flags;
    ctx.bstate = BS_NONE;
    ctx.sr = env->sr;
    ctx.fpscr = env->fpscr;
    ctx.memidx = (env->sr & SR_MD) ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx.delayed_pc = -1; /* use delayed pc from env pointer */
    ctx.tb = tb;
    ctx.singlestep_enabled = env->singlestep_enabled;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        fprintf(logfile,
                "------------------------------------------------\n");
        cpu_dump_state(env, logfile, fprintf, 0);
    }
#endif

    ii = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
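    /* Translate instructions until a branch or exception ends the TB, the
       opcode buffer fills up, a page boundary is crossed, or the per-TB
       instruction limit is reached. */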
    while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
        if (env->nb_breakpoints > 0) {
            for (i = 0; i < env->nb_breakpoints; i++) {
                if (ctx.pc == env->breakpoints[i]) {
                    /* We have hit a breakpoint - make sure PC is up-to-date */
                    tcg_gen_movi_i32(cpu_pc, ctx.pc);
                    tcg_gen_helper_0_0(helper_debug);
                    ctx.bstate = BS_EXCP;
                    break;
                }
            }
        }
        if (search_pc) {
            i = gen_opc_ptr - gen_opc_buf;
            if (ii < i) {
                ii++;
                while (ii < i)
                    gen_opc_instr_start[ii++] = 0;
            }
            gen_opc_pc[ii] = ctx.pc;
            gen_opc_hflags[ii] = ctx.flags;
            gen_opc_instr_start[ii] = 1;
            gen_opc_icount[ii] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if 0
        fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
        fflush(stderr);
#endif
        ctx.opcode = lduw_code(ctx.pc);
        decode_opc(&ctx);
        num_insns++;
        ctx.pc += 2;
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (env->singlestep_enabled)
            break;
        if (num_insns >= max_insns)
            break;
#ifdef SH4_SINGLE_STEP
        break;
#endif
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (env->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            /* gen_op_interrupt_restart(); */
            /* fall through */
        case BS_NONE:
            if (ctx.flags) {
                gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
            }
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            /* gen_op_interrupt_restart(); */
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        i = gen_opc_ptr - gen_opc_buf;
        ii++;
        while (ii <= i)
            gen_opc_instr_start[ii++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
#ifdef SH4_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM)
        fprintf(logfile, "\n");
#endif
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN:\n");        /* , lookup_symbol(pc_start)); */
        target_disas(logfile, pc_start, ctx.pc - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
}

void gen_intermediate_code(CPUState * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

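/* Restore pc and flags from the values recorded for opcode index pc_pos
   by the search_pc pass above. */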
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
    env->flags = gen_opc_hflags[pc_pos];
}