Statistics
| Branch: | Revision:

root / target-sh4 / translate.c @ ad8d25a1

History | View | Annotate | Download (64.4 kB)

1
/*
2
 *  SH4 translation
3
 *
4
 *  Copyright (c) 2005 Samuel Tardieu
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18
 */
19

    
20
#define DEBUG_DISAS
21
#define SH4_DEBUG_DISAS
22
//#define SH4_SINGLE_STEP
23

    
24
#include "cpu.h"
25
#include "disas.h"
26
#include "tcg-op.h"
27

    
28
#include "helper.h"
29
#define GEN_HELPER 1
30
#include "helper.h"
31

    
32
typedef struct DisasContext {
33
    struct TranslationBlock *tb;
34
    target_ulong pc;
35
    uint32_t sr;
36
    uint32_t fpscr;
37
    uint16_t opcode;
38
    uint32_t flags;
39
    int bstate;
40
    int memidx;
41
    uint32_t delayed_pc;
42
    int singlestep_enabled;
43
    uint32_t features;
44
    int has_movcal;
45
} DisasContext;
46

    
47
#if defined(CONFIG_USER_ONLY)
48
#define IS_USER(ctx) 1
49
#else
50
#define IS_USER(ctx) (!(ctx->sr & SR_MD))
51
#endif
52

    
53
enum {
54
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
55
                      * exception condition
56
                      */
57
    BS_STOP     = 1, /* We want to stop translation for any reason */
58
    BS_BRANCH   = 2, /* We reached a branch condition     */
59
    BS_EXCP     = 3, /* We reached an exception condition */
60
};
61

    
62
/* global register indexes */
63
static TCGv_ptr cpu_env;
64
static TCGv cpu_gregs[24];
65
static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
66
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
67
static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
68
static TCGv cpu_fregs[32];
69

    
70
/* internal register indexes */
71
static TCGv cpu_flags, cpu_delayed_pc;
72

    
73
static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
74

    
75
#include "gen-icount.h"
76

    
77
static void sh4_translate_init(void)
78
{
79
    int i;
80
    static int done_init = 0;
81
    static const char * const gregnames[24] = {
82
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
83
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
84
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
85
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
86
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
87
    };
88
    static const char * const fregnames[32] = {
89
         "FPR0_BANK0",  "FPR1_BANK0",  "FPR2_BANK0",  "FPR3_BANK0",
90
         "FPR4_BANK0",  "FPR5_BANK0",  "FPR6_BANK0",  "FPR7_BANK0",
91
         "FPR8_BANK0",  "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
92
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
93
         "FPR0_BANK1",  "FPR1_BANK1",  "FPR2_BANK1",  "FPR3_BANK1",
94
         "FPR4_BANK1",  "FPR5_BANK1",  "FPR6_BANK1",  "FPR7_BANK1",
95
         "FPR8_BANK1",  "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
96
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
97
    };
98

    
99
    if (done_init)
100
        return;
101

    
102
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
103

    
104
    for (i = 0; i < 24; i++)
105
        cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
106
                                              offsetof(CPUSH4State, gregs[i]),
107
                                              gregnames[i]);
108

    
109
    cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
110
                                    offsetof(CPUSH4State, pc), "PC");
111
    cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
112
                                    offsetof(CPUSH4State, sr), "SR");
113
    cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
114
                                     offsetof(CPUSH4State, ssr), "SSR");
115
    cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
116
                                     offsetof(CPUSH4State, spc), "SPC");
117
    cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
118
                                     offsetof(CPUSH4State, gbr), "GBR");
119
    cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
120
                                     offsetof(CPUSH4State, vbr), "VBR");
121
    cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
122
                                     offsetof(CPUSH4State, sgr), "SGR");
123
    cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
124
                                     offsetof(CPUSH4State, dbr), "DBR");
125
    cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
126
                                      offsetof(CPUSH4State, mach), "MACH");
127
    cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
128
                                      offsetof(CPUSH4State, macl), "MACL");
129
    cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
130
                                    offsetof(CPUSH4State, pr), "PR");
131
    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
132
                                       offsetof(CPUSH4State, fpscr), "FPSCR");
133
    cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
134
                                      offsetof(CPUSH4State, fpul), "FPUL");
135

    
136
    cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
137
                                       offsetof(CPUSH4State, flags), "_flags_");
138
    cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
139
                                            offsetof(CPUSH4State, delayed_pc),
140
                                            "_delayed_pc_");
141
    cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
142
                                      offsetof(CPUSH4State, ldst), "_ldst_");
143

    
144
    for (i = 0; i < 32; i++)
145
        cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
146
                                              offsetof(CPUSH4State, fregs[i]),
147
                                              fregnames[i]);
148

    
149
    /* register helpers */
150
#define GEN_HELPER 2
151
#include "helper.h"
152

    
153
    done_init = 1;
154
}
155

    
156
void cpu_dump_state(CPUSH4State * env, FILE * f,
157
                    int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
158
                    int flags)
159
{
160
    int i;
161
    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
162
                env->pc, env->sr, env->pr, env->fpscr);
163
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
164
                env->spc, env->ssr, env->gbr, env->vbr);
165
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
166
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
167
    for (i = 0; i < 24; i += 4) {
168
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
169
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
170
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
171
    }
172
    if (env->flags & DELAY_SLOT) {
173
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
174
                    env->delayed_pc);
175
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
176
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
177
                    env->delayed_pc);
178
    }
179
}
180

    
181
typedef struct {
182
    const char *name;
183
    int id;
184
    uint32_t pvr;
185
    uint32_t prr;
186
    uint32_t cvr;
187
    uint32_t features;
188
} sh4_def_t;
189

    
190
static sh4_def_t sh4_defs[] = {
191
    {
192
        .name = "SH7750R",
193
        .id = SH_CPU_SH7750R,
194
        .pvr = 0x00050000,
195
        .prr = 0x00000100,
196
        .cvr = 0x00110000,
197
        .features = SH_FEATURE_BCR3_AND_BCR4,
198
    }, {
199
        .name = "SH7751R",
200
        .id = SH_CPU_SH7751R,
201
        .pvr = 0x04050005,
202
        .prr = 0x00000113,
203
        .cvr = 0x00110000,        /* Neutered caches, should be 0x20480000 */
204
        .features = SH_FEATURE_BCR3_AND_BCR4,
205
    }, {
206
        .name = "SH7785",
207
        .id = SH_CPU_SH7785,
208
        .pvr = 0x10300700,
209
        .prr = 0x00000200,
210
        .cvr = 0x71440211,
211
        .features = SH_FEATURE_SH4A,
212
     },
213
};
214

    
215
static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
216
{
217
    int i;
218

    
219
    if (strcasecmp(name, "any") == 0)
220
        return &sh4_defs[0];
221

    
222
    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
223
        if (strcasecmp(name, sh4_defs[i].name) == 0)
224
            return &sh4_defs[i];
225

    
226
    return NULL;
227
}
228

    
229
void sh4_cpu_list(FILE *f, fprintf_function cpu_fprintf)
230
{
231
    int i;
232

    
233
    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
234
        (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
235
}
236

    
237
static void cpu_register(CPUSH4State *env, const sh4_def_t *def)
238
{
239
    env->pvr = def->pvr;
240
    env->prr = def->prr;
241
    env->cvr = def->cvr;
242
    env->id = def->id;
243
}
244

    
245
SuperHCPU *cpu_sh4_init(const char *cpu_model)
246
{
247
    SuperHCPU *cpu;
248
    CPUSH4State *env;
249
    const sh4_def_t *def;
250

    
251
    def = cpu_sh4_find_by_name(cpu_model);
252
    if (!def)
253
        return NULL;
254
    cpu = SUPERH_CPU(object_new(TYPE_SUPERH_CPU));
255
    env = &cpu->env;
256
    env->features = def->features;
257
    sh4_translate_init();
258
    env->cpu_model_str = cpu_model;
259
    cpu_reset(CPU(cpu));
260
    cpu_register(env, def);
261
    qemu_init_vcpu(env);
262
    return cpu;
263
}
264

    
265
static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
266
{
267
    TranslationBlock *tb;
268
    tb = ctx->tb;
269

    
270
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
271
        !ctx->singlestep_enabled) {
272
        /* Use a direct jump if in same page and singlestep not enabled */
273
        tcg_gen_goto_tb(n);
274
        tcg_gen_movi_i32(cpu_pc, dest);
275
        tcg_gen_exit_tb((tcg_target_long)tb + n);
276
    } else {
277
        tcg_gen_movi_i32(cpu_pc, dest);
278
        if (ctx->singlestep_enabled)
279
            gen_helper_debug(cpu_env);
280
        tcg_gen_exit_tb(0);
281
    }
282
}
283

    
284
static void gen_jump(DisasContext * ctx)
285
{
286
    if (ctx->delayed_pc == (uint32_t) - 1) {
287
        /* Target is not statically known, it comes necessarily from a
288
           delayed jump as immediate jump are conditinal jumps */
289
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
290
        if (ctx->singlestep_enabled)
291
            gen_helper_debug(cpu_env);
292
        tcg_gen_exit_tb(0);
293
    } else {
294
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
295
    }
296
}
297

    
298
static inline void gen_branch_slot(uint32_t delayed_pc, int t)
299
{
300
    TCGv sr;
301
    int label = gen_new_label();
302
    tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
303
    sr = tcg_temp_new();
304
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
305
    tcg_gen_brcondi_i32(t ? TCG_COND_EQ:TCG_COND_NE, sr, 0, label);
306
    tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
307
    gen_set_label(label);
308
}
309

    
310
/* Immediate conditional jump (bt or bf) */
311
static void gen_conditional_jump(DisasContext * ctx,
312
                                 target_ulong ift, target_ulong ifnott)
313
{
314
    int l1;
315
    TCGv sr;
316

    
317
    l1 = gen_new_label();
318
    sr = tcg_temp_new();
319
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
320
    tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
321
    gen_goto_tb(ctx, 0, ifnott);
322
    gen_set_label(l1);
323
    gen_goto_tb(ctx, 1, ift);
324
}
325

    
326
/* Delayed conditional jump (bt or bf) */
327
static void gen_delayed_conditional_jump(DisasContext * ctx)
328
{
329
    int l1;
330
    TCGv ds;
331

    
332
    l1 = gen_new_label();
333
    ds = tcg_temp_new();
334
    tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
335
    tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
336
    gen_goto_tb(ctx, 1, ctx->pc + 2);
337
    gen_set_label(l1);
338
    tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
339
    gen_jump(ctx);
340
}
341

    
342
static inline void gen_set_t(void)
343
{
344
    tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
345
}
346

    
347
static inline void gen_clr_t(void)
348
{
349
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
350
}
351

    
352
static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
353
{
354
    TCGv t;
355

    
356
    t = tcg_temp_new();
357
    tcg_gen_setcond_i32(cond, t, t1, t0);
358
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
359
    tcg_gen_or_i32(cpu_sr, cpu_sr, t);
360

    
361
    tcg_temp_free(t);
362
}
363

    
364
static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
365
{
366
    TCGv t;
367

    
368
    t = tcg_temp_new();
369
    tcg_gen_setcondi_i32(cond, t, t0, imm);
370
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
371
    tcg_gen_or_i32(cpu_sr, cpu_sr, t);
372

    
373
    tcg_temp_free(t);
374
}
375

    
376
static inline void gen_store_flags(uint32_t flags)
377
{
378
    tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
379
    tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
380
}
381

    
382
static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
383
{
384
    TCGv tmp = tcg_temp_new();
385

    
386
    p0 &= 0x1f;
387
    p1 &= 0x1f;
388

    
389
    tcg_gen_andi_i32(tmp, t1, (1 << p1));
390
    tcg_gen_andi_i32(t0, t0, ~(1 << p0));
391
    if (p0 < p1)
392
        tcg_gen_shri_i32(tmp, tmp, p1 - p0);
393
    else if (p0 > p1)
394
        tcg_gen_shli_i32(tmp, tmp, p0 - p1);
395
    tcg_gen_or_i32(t0, t0, tmp);
396

    
397
    tcg_temp_free(tmp);
398
}
399

    
400
static inline void gen_load_fpr64(TCGv_i64 t, int reg)
401
{
402
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
403
}
404

    
405
static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
406
{
407
    TCGv_i32 tmp = tcg_temp_new_i32();
408
    tcg_gen_trunc_i64_i32(tmp, t);
409
    tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
410
    tcg_gen_shri_i64(t, t, 32);
411
    tcg_gen_trunc_i64_i32(tmp, t);
412
    tcg_gen_mov_i32(cpu_fregs[reg], tmp);
413
    tcg_temp_free_i32(tmp);
414
}
415

    
416
#define B3_0 (ctx->opcode & 0xf)
417
#define B6_4 ((ctx->opcode >> 4) & 0x7)
418
#define B7_4 ((ctx->opcode >> 4) & 0xf)
419
#define B7_0 (ctx->opcode & 0xff)
420
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
421
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
422
  (ctx->opcode & 0xfff))
423
#define B11_8 ((ctx->opcode >> 8) & 0xf)
424
#define B15_12 ((ctx->opcode >> 12) & 0xf)
425

    
426
#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
427
                (cpu_gregs[x + 16]) : (cpu_gregs[x]))
428

    
429
#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
430
                ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
431

    
432
#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
433
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
434
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
435
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
436

    
437
#define CHECK_NOT_DELAY_SLOT \
438
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
439
  {                                                           \
440
      gen_helper_raise_slot_illegal_instruction(cpu_env);     \
441
      ctx->bstate = BS_EXCP;                                  \
442
      return;                                                 \
443
  }
444

    
445
#define CHECK_PRIVILEGED                                        \
446
  if (IS_USER(ctx)) {                                           \
447
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
448
          gen_helper_raise_slot_illegal_instruction(cpu_env);   \
449
      } else {                                                  \
450
          gen_helper_raise_illegal_instruction(cpu_env);        \
451
      }                                                         \
452
      ctx->bstate = BS_EXCP;                                    \
453
      return;                                                   \
454
  }
455

    
456
#define CHECK_FPU_ENABLED                                       \
457
  if (ctx->flags & SR_FD) {                                     \
458
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
459
          gen_helper_raise_slot_fpu_disable(cpu_env);           \
460
      } else {                                                  \
461
          gen_helper_raise_fpu_disable(cpu_env);                \
462
      }                                                         \
463
      ctx->bstate = BS_EXCP;                                    \
464
      return;                                                   \
465
  }
466

    
467
static void _decode_opc(DisasContext * ctx)
468
{
469
    /* This code tries to make movcal emulation sufficiently
470
       accurate for Linux purposes.  This instruction writes
471
       memory, and prior to that, always allocates a cache line.
472
       It is used in two contexts:
473
       - in memcpy, where data is copied in blocks, the first write
474
       of to a block uses movca.l for performance.
475
       - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
476
       to flush the cache. Here, the data written by movcal.l is never
477
       written to memory, and the data written is just bogus.
478

479
       To simulate this, we simulate movcal.l, we store the value to memory,
480
       but we also remember the previous content. If we see ocbi, we check
481
       if movcal.l for that address was done previously. If so, the write should
482
       not have hit the memory, so we restore the previous content.
483
       When we see an instruction that is neither movca.l
484
       nor ocbi, the previous content is discarded.
485

486
       To optimize, we only try to flush stores when we're at the start of
487
       TB, or if we already saw movca.l in this TB and did not flush stores
488
       yet.  */
489
    if (ctx->has_movcal)
490
        {
491
          int opcode = ctx->opcode & 0xf0ff;
492
          if (opcode != 0x0093 /* ocbi */
493
              && opcode != 0x00c3 /* movca.l */)
494
              {
495
                  gen_helper_discard_movcal_backup(cpu_env);
496
                  ctx->has_movcal = 0;
497
              }
498
        }
499

    
500
#if 0
501
    fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
502
#endif
503

    
504
    switch (ctx->opcode) {
505
    case 0x0019:                /* div0u */
506
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
507
        return;
508
    case 0x000b:                /* rts */
509
        CHECK_NOT_DELAY_SLOT
510
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
511
        ctx->flags |= DELAY_SLOT;
512
        ctx->delayed_pc = (uint32_t) - 1;
513
        return;
514
    case 0x0028:                /* clrmac */
515
        tcg_gen_movi_i32(cpu_mach, 0);
516
        tcg_gen_movi_i32(cpu_macl, 0);
517
        return;
518
    case 0x0048:                /* clrs */
519
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
520
        return;
521
    case 0x0008:                /* clrt */
522
        gen_clr_t();
523
        return;
524
    case 0x0038:                /* ldtlb */
525
        CHECK_PRIVILEGED
526
        gen_helper_ldtlb(cpu_env);
527
        return;
528
    case 0x002b:                /* rte */
529
        CHECK_PRIVILEGED
530
        CHECK_NOT_DELAY_SLOT
531
        tcg_gen_mov_i32(cpu_sr, cpu_ssr);
532
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
533
        ctx->flags |= DELAY_SLOT;
534
        ctx->delayed_pc = (uint32_t) - 1;
535
        return;
536
    case 0x0058:                /* sets */
537
        tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
538
        return;
539
    case 0x0018:                /* sett */
540
        gen_set_t();
541
        return;
542
    case 0xfbfd:                /* frchg */
543
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
544
        ctx->bstate = BS_STOP;
545
        return;
546
    case 0xf3fd:                /* fschg */
547
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
548
        ctx->bstate = BS_STOP;
549
        return;
550
    case 0x0009:                /* nop */
551
        return;
552
    case 0x001b:                /* sleep */
553
        CHECK_PRIVILEGED
554
        gen_helper_sleep(cpu_env, tcg_const_i32(ctx->pc + 2));
555
        return;
556
    }
557

    
558
    switch (ctx->opcode & 0xf000) {
559
    case 0x1000:                /* mov.l Rm,@(disp,Rn) */
560
        {
561
            TCGv addr = tcg_temp_new();
562
            tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
563
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
564
            tcg_temp_free(addr);
565
        }
566
        return;
567
    case 0x5000:                /* mov.l @(disp,Rm),Rn */
568
        {
569
            TCGv addr = tcg_temp_new();
570
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
571
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
572
            tcg_temp_free(addr);
573
        }
574
        return;
575
    case 0xe000:                /* mov #imm,Rn */
576
        tcg_gen_movi_i32(REG(B11_8), B7_0s);
577
        return;
578
    case 0x9000:                /* mov.w @(disp,PC),Rn */
579
        {
580
            TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
581
            tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
582
            tcg_temp_free(addr);
583
        }
584
        return;
585
    case 0xd000:                /* mov.l @(disp,PC),Rn */
586
        {
587
            TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
588
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
589
            tcg_temp_free(addr);
590
        }
591
        return;
592
    case 0x7000:                /* add #imm,Rn */
593
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
594
        return;
595
    case 0xa000:                /* bra disp */
596
        CHECK_NOT_DELAY_SLOT
597
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
598
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
599
        ctx->flags |= DELAY_SLOT;
600
        return;
601
    case 0xb000:                /* bsr disp */
602
        CHECK_NOT_DELAY_SLOT
603
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
604
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
605
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
606
        ctx->flags |= DELAY_SLOT;
607
        return;
608
    }
609

    
610
    switch (ctx->opcode & 0xf00f) {
611
    case 0x6003:                /* mov Rm,Rn */
612
        tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
613
        return;
614
    case 0x2000:                /* mov.b Rm,@Rn */
615
        tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
616
        return;
617
    case 0x2001:                /* mov.w Rm,@Rn */
618
        tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
619
        return;
620
    case 0x2002:                /* mov.l Rm,@Rn */
621
        tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
622
        return;
623
    case 0x6000:                /* mov.b @Rm,Rn */
624
        tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
625
        return;
626
    case 0x6001:                /* mov.w @Rm,Rn */
627
        tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
628
        return;
629
    case 0x6002:                /* mov.l @Rm,Rn */
630
        tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
631
        return;
632
    case 0x2004:                /* mov.b Rm,@-Rn */
633
        {
634
            TCGv addr = tcg_temp_new();
635
            tcg_gen_subi_i32(addr, REG(B11_8), 1);
636
            tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);        /* might cause re-execution */
637
            tcg_gen_mov_i32(REG(B11_8), addr);                        /* modify register status */
638
            tcg_temp_free(addr);
639
        }
640
        return;
641
    case 0x2005:                /* mov.w Rm,@-Rn */
642
        {
643
            TCGv addr = tcg_temp_new();
644
            tcg_gen_subi_i32(addr, REG(B11_8), 2);
645
            tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
646
            tcg_gen_mov_i32(REG(B11_8), addr);
647
            tcg_temp_free(addr);
648
        }
649
        return;
650
    case 0x2006:                /* mov.l Rm,@-Rn */
651
        {
652
            TCGv addr = tcg_temp_new();
653
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
654
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
655
            tcg_gen_mov_i32(REG(B11_8), addr);
656
        }
657
        return;
658
    case 0x6004:                /* mov.b @Rm+,Rn */
659
        tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
660
        if ( B11_8 != B7_4 )
661
                tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
662
        return;
663
    case 0x6005:                /* mov.w @Rm+,Rn */
664
        tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
665
        if ( B11_8 != B7_4 )
666
                tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
667
        return;
668
    case 0x6006:                /* mov.l @Rm+,Rn */
669
        tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
670
        if ( B11_8 != B7_4 )
671
                tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
672
        return;
673
    case 0x0004:                /* mov.b Rm,@(R0,Rn) */
674
        {
675
            TCGv addr = tcg_temp_new();
676
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
677
            tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
678
            tcg_temp_free(addr);
679
        }
680
        return;
681
    case 0x0005:                /* mov.w Rm,@(R0,Rn) */
682
        {
683
            TCGv addr = tcg_temp_new();
684
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
685
            tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
686
            tcg_temp_free(addr);
687
        }
688
        return;
689
    case 0x0006:                /* mov.l Rm,@(R0,Rn) */
690
        {
691
            TCGv addr = tcg_temp_new();
692
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
693
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
694
            tcg_temp_free(addr);
695
        }
696
        return;
697
    case 0x000c:                /* mov.b @(R0,Rm),Rn */
698
        {
699
            TCGv addr = tcg_temp_new();
700
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
701
            tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
702
            tcg_temp_free(addr);
703
        }
704
        return;
705
    case 0x000d:                /* mov.w @(R0,Rm),Rn */
706
        {
707
            TCGv addr = tcg_temp_new();
708
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
709
            tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
710
            tcg_temp_free(addr);
711
        }
712
        return;
713
    case 0x000e:                /* mov.l @(R0,Rm),Rn */
714
        {
715
            TCGv addr = tcg_temp_new();
716
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
717
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
718
            tcg_temp_free(addr);
719
        }
720
        return;
721
    case 0x6008:                /* swap.b Rm,Rn */
722
        {
723
            TCGv high, low;
724
            high = tcg_temp_new();
725
            tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
726
            low = tcg_temp_new();
727
            tcg_gen_ext16u_i32(low, REG(B7_4));
728
            tcg_gen_bswap16_i32(low, low);
729
            tcg_gen_or_i32(REG(B11_8), high, low);
730
            tcg_temp_free(low);
731
            tcg_temp_free(high);
732
        }
733
        return;
734
    case 0x6009:                /* swap.w Rm,Rn */
735
        {
736
            TCGv high, low;
737
            high = tcg_temp_new();
738
            tcg_gen_shli_i32(high, REG(B7_4), 16);
739
            low = tcg_temp_new();
740
            tcg_gen_shri_i32(low, REG(B7_4), 16);
741
            tcg_gen_ext16u_i32(low, low);
742
            tcg_gen_or_i32(REG(B11_8), high, low);
743
            tcg_temp_free(low);
744
            tcg_temp_free(high);
745
        }
746
        return;
747
    case 0x200d:                /* xtrct Rm,Rn */
748
        {
749
            TCGv high, low;
750
            high = tcg_temp_new();
751
            tcg_gen_shli_i32(high, REG(B7_4), 16);
752
            low = tcg_temp_new();
753
            tcg_gen_shri_i32(low, REG(B11_8), 16);
754
            tcg_gen_ext16u_i32(low, low);
755
            tcg_gen_or_i32(REG(B11_8), high, low);
756
            tcg_temp_free(low);
757
            tcg_temp_free(high);
758
        }
759
        return;
760
    case 0x300c:                /* add Rm,Rn */
761
        tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
762
        return;
763
    case 0x300e:                /* addc Rm,Rn */
764
        {
765
            TCGv t0, t1, t2;
766
            t0 = tcg_temp_new();
767
            tcg_gen_andi_i32(t0, cpu_sr, SR_T);
768
            t1 = tcg_temp_new();
769
            tcg_gen_add_i32(t1, REG(B7_4), REG(B11_8));
770
            tcg_gen_add_i32(t0, t0, t1);
771
            t2 = tcg_temp_new();
772
            tcg_gen_setcond_i32(TCG_COND_GTU, t2, REG(B11_8), t1);
773
            tcg_gen_setcond_i32(TCG_COND_GTU, t1, t1, t0);
774
            tcg_gen_or_i32(t1, t1, t2);
775
            tcg_temp_free(t2);
776
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
777
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
778
            tcg_temp_free(t1);
779
            tcg_gen_mov_i32(REG(B11_8), t0);
780
            tcg_temp_free(t0);
781
        }
782
        return;
783
    case 0x300f:                /* addv Rm,Rn */
784
        {
785
            TCGv t0, t1, t2;
786
            t0 = tcg_temp_new();
787
            tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8));
788
            t1 = tcg_temp_new();
789
            tcg_gen_xor_i32(t1, t0, REG(B11_8));
790
            t2 = tcg_temp_new();
791
            tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8));
792
            tcg_gen_andc_i32(t1, t1, t2);
793
            tcg_temp_free(t2);
794
            tcg_gen_shri_i32(t1, t1, 31);
795
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
796
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
797
            tcg_temp_free(t1);
798
            tcg_gen_mov_i32(REG(B7_4), t0);
799
            tcg_temp_free(t0);
800
        }
801
        return;
802
    case 0x2009:                /* and Rm,Rn */
803
        tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
804
        return;
805
    case 0x3000:                /* cmp/eq Rm,Rn */
806
        gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
807
        return;
808
    case 0x3003:                /* cmp/ge Rm,Rn */
809
        gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
810
        return;
811
    case 0x3007:                /* cmp/gt Rm,Rn */
812
        gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
813
        return;
814
    case 0x3006:                /* cmp/hi Rm,Rn */
815
        gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
816
        return;
817
    case 0x3002:                /* cmp/hs Rm,Rn */
818
        gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
819
        return;
820
    case 0x200c:                /* cmp/str Rm,Rn */
821
        {
822
            TCGv cmp1 = tcg_temp_new();
823
            TCGv cmp2 = tcg_temp_new();
824
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
825
            tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
826
            tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
827
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
828
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
829
            tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
830
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
831
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
832
            tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
833
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
834
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
835
            tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
836
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
837
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
838
            tcg_temp_free(cmp2);
839
            tcg_temp_free(cmp1);
840
        }
841
        return;
842
    case 0x2007:                /* div0s Rm,Rn */
843
        {
844
            gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31);        /* SR_Q */
845
            gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);                /* SR_M */
846
            TCGv val = tcg_temp_new();
847
            tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
848
            gen_copy_bit_i32(cpu_sr, 0, val, 31);                /* SR_T */
849
            tcg_temp_free(val);
850
        }
851
        return;
852
    case 0x3004:                /* div1 Rm,Rn */
853
        gen_helper_div1(REG(B11_8), cpu_env, REG(B7_4), REG(B11_8));
854
        return;
855
    case 0x300d:                /* dmuls.l Rm,Rn */
856
        {
857
            TCGv_i64 tmp1 = tcg_temp_new_i64();
858
            TCGv_i64 tmp2 = tcg_temp_new_i64();
859

    
860
            tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
861
            tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
862
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
863
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
864
            tcg_gen_shri_i64(tmp1, tmp1, 32);
865
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
866

    
867
            tcg_temp_free_i64(tmp2);
868
            tcg_temp_free_i64(tmp1);
869
        }
870
        return;
871
    case 0x3005:                /* dmulu.l Rm,Rn */
872
        {
873
            TCGv_i64 tmp1 = tcg_temp_new_i64();
874
            TCGv_i64 tmp2 = tcg_temp_new_i64();
875

    
876
            tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
877
            tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
878
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
879
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
880
            tcg_gen_shri_i64(tmp1, tmp1, 32);
881
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
882

    
883
            tcg_temp_free_i64(tmp2);
884
            tcg_temp_free_i64(tmp1);
885
        }
886
        return;
887
    case 0x600e:                /* exts.b Rm,Rn */
888
        tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
889
        return;
890
    case 0x600f:                /* exts.w Rm,Rn */
891
        tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
892
        return;
893
    case 0x600c:                /* extu.b Rm,Rn */
894
        tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
895
        return;
896
    case 0x600d:                /* extu.w Rm,Rn */
897
        tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
898
        return;
899
    case 0x000f:                /* mac.l @Rm+,@Rn+ */
900
        {
901
            TCGv arg0, arg1;
902
            arg0 = tcg_temp_new();
903
            tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
904
            arg1 = tcg_temp_new();
905
            tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
906
            gen_helper_macl(cpu_env, arg0, arg1);
907
            tcg_temp_free(arg1);
908
            tcg_temp_free(arg0);
909
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
910
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
911
        }
912
        return;
913
    case 0x400f:                /* mac.w @Rm+,@Rn+ */
914
        {
915
            TCGv arg0, arg1;
916
            arg0 = tcg_temp_new();
917
            tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
918
            arg1 = tcg_temp_new();
919
            tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
920
            gen_helper_macw(cpu_env, arg0, arg1);
921
            tcg_temp_free(arg1);
922
            tcg_temp_free(arg0);
923
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
924
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
925
        }
926
        return;
927
    case 0x0007:                /* mul.l Rm,Rn */
928
        tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
929
        return;
930
    case 0x200f:                /* muls.w Rm,Rn */
931
        {
932
            TCGv arg0, arg1;
933
            arg0 = tcg_temp_new();
934
            tcg_gen_ext16s_i32(arg0, REG(B7_4));
935
            arg1 = tcg_temp_new();
936
            tcg_gen_ext16s_i32(arg1, REG(B11_8));
937
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
938
            tcg_temp_free(arg1);
939
            tcg_temp_free(arg0);
940
        }
941
        return;
942
    case 0x200e:                /* mulu.w Rm,Rn */
943
        {
944
            TCGv arg0, arg1;
945
            arg0 = tcg_temp_new();
946
            tcg_gen_ext16u_i32(arg0, REG(B7_4));
947
            arg1 = tcg_temp_new();
948
            tcg_gen_ext16u_i32(arg1, REG(B11_8));
949
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
950
            tcg_temp_free(arg1);
951
            tcg_temp_free(arg0);
952
        }
953
        return;
954
    case 0x600b:                /* neg Rm,Rn */
955
        tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
956
        return;
957
    case 0x600a:                /* negc Rm,Rn */
958
        {
959
            TCGv t0, t1;
960
            t0 = tcg_temp_new();
961
            tcg_gen_neg_i32(t0, REG(B7_4));
962
            t1 = tcg_temp_new();
963
            tcg_gen_andi_i32(t1, cpu_sr, SR_T);
964
            tcg_gen_sub_i32(REG(B11_8), t0, t1);
965
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
966
            tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
967
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
968
            tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
969
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
970
            tcg_temp_free(t0);
971
            tcg_temp_free(t1);
972
        }
973
        return;
974
    case 0x6007:                /* not Rm,Rn */
975
        tcg_gen_not_i32(REG(B11_8), REG(B7_4));
976
        return;
977
    case 0x200b:                /* or Rm,Rn */
978
        tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
979
        return;
980
    case 0x400c:                /* shad Rm,Rn */
981
        {
982
            int label1 = gen_new_label();
983
            int label2 = gen_new_label();
984
            int label3 = gen_new_label();
985
            int label4 = gen_new_label();
986
            TCGv shift;
987
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
988
            /* Rm positive, shift to the left */
989
            shift = tcg_temp_new();
990
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
991
            tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
992
            tcg_temp_free(shift);
993
            tcg_gen_br(label4);
994
            /* Rm negative, shift to the right */
995
            gen_set_label(label1);
996
            shift = tcg_temp_new();
997
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
998
            tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
999
            tcg_gen_not_i32(shift, REG(B7_4));
1000
            tcg_gen_andi_i32(shift, shift, 0x1f);
1001
            tcg_gen_addi_i32(shift, shift, 1);
1002
            tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
1003
            tcg_temp_free(shift);
1004
            tcg_gen_br(label4);
1005
            /* Rm = -32 */
1006
            gen_set_label(label2);
1007
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
1008
            tcg_gen_movi_i32(REG(B11_8), 0);
1009
            tcg_gen_br(label4);
1010
            gen_set_label(label3);
1011
            tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
1012
            gen_set_label(label4);
1013
        }
1014
        return;
1015
    case 0x400d:                /* shld Rm,Rn */
1016
        {
1017
            int label1 = gen_new_label();
1018
            int label2 = gen_new_label();
1019
            int label3 = gen_new_label();
1020
            TCGv shift;
1021
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
1022
            /* Rm positive, shift to the left */
1023
            shift = tcg_temp_new();
1024
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1025
            tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
1026
            tcg_temp_free(shift);
1027
            tcg_gen_br(label3);
1028
            /* Rm negative, shift to the right */
1029
            gen_set_label(label1);
1030
            shift = tcg_temp_new();
1031
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1032
            tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
1033
            tcg_gen_not_i32(shift, REG(B7_4));
1034
            tcg_gen_andi_i32(shift, shift, 0x1f);
1035
            tcg_gen_addi_i32(shift, shift, 1);
1036
            tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
1037
            tcg_temp_free(shift);
1038
            tcg_gen_br(label3);
1039
            /* Rm = -32 */
1040
            gen_set_label(label2);
1041
            tcg_gen_movi_i32(REG(B11_8), 0);
1042
            gen_set_label(label3);
1043
        }
1044
        return;
1045
    case 0x3008:                /* sub Rm,Rn */
1046
        tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1047
        return;
1048
    case 0x300a:                /* subc Rm,Rn */
1049
        {
1050
            TCGv t0, t1, t2;
1051
            t0 = tcg_temp_new();
1052
            tcg_gen_andi_i32(t0, cpu_sr, SR_T);
1053
            t1 = tcg_temp_new();
1054
            tcg_gen_sub_i32(t1, REG(B11_8), REG(B7_4));
1055
            tcg_gen_sub_i32(t0, t1, t0);
1056
            t2 = tcg_temp_new();
1057
            tcg_gen_setcond_i32(TCG_COND_LTU, t2, REG(B11_8), t1);
1058
            tcg_gen_setcond_i32(TCG_COND_LTU, t1, t1, t0);
1059
            tcg_gen_or_i32(t1, t1, t2);
1060
            tcg_temp_free(t2);
1061
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
1062
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
1063
            tcg_temp_free(t1);
1064
            tcg_gen_mov_i32(REG(B11_8), t0);
1065
            tcg_temp_free(t0);
1066
        }
1067
        return;
1068
    case 0x300b:                /* subv Rm,Rn */
1069
        {
1070
            TCGv t0, t1, t2;
1071
            t0 = tcg_temp_new();
1072
            tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4));
1073
            t1 = tcg_temp_new();
1074
            tcg_gen_xor_i32(t1, t0, REG(B7_4));
1075
            t2 = tcg_temp_new();
1076
            tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4));
1077
            tcg_gen_and_i32(t1, t1, t2);
1078
            tcg_temp_free(t2);
1079
            tcg_gen_shri_i32(t1, t1, 31);
1080
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
1081
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
1082
            tcg_temp_free(t1);
1083
            tcg_gen_mov_i32(REG(B11_8), t0);
1084
            tcg_temp_free(t0);
1085
        }
1086
        return;
1087
    case 0x2008:                /* tst Rm,Rn */
1088
        {
1089
            TCGv val = tcg_temp_new();
1090
            tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
1091
            gen_cmp_imm(TCG_COND_EQ, val, 0);
1092
            tcg_temp_free(val);
1093
        }
1094
        return;
1095
    case 0x200a:                /* xor Rm,Rn */
1096
        tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1097
        return;
1098
    case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1099
        CHECK_FPU_ENABLED
1100
        if (ctx->fpscr & FPSCR_SZ) {
1101
            TCGv_i64 fp = tcg_temp_new_i64();
1102
            gen_load_fpr64(fp, XREG(B7_4));
1103
            gen_store_fpr64(fp, XREG(B11_8));
1104
            tcg_temp_free_i64(fp);
1105
        } else {
1106
            tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1107
        }
1108
        return;
1109
    case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1110
        CHECK_FPU_ENABLED
1111
        if (ctx->fpscr & FPSCR_SZ) {
1112
            TCGv addr_hi = tcg_temp_new();
1113
            int fr = XREG(B7_4);
1114
            tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
1115
            tcg_gen_qemu_st32(cpu_fregs[fr  ], REG(B11_8), ctx->memidx);
1116
            tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi,           ctx->memidx);
1117
            tcg_temp_free(addr_hi);
1118
        } else {
1119
            tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
1120
        }
1121
        return;
1122
    case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1123
        CHECK_FPU_ENABLED
1124
        if (ctx->fpscr & FPSCR_SZ) {
1125
            TCGv addr_hi = tcg_temp_new();
1126
            int fr = XREG(B11_8);
1127
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1128
            tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1129
            tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1130
            tcg_temp_free(addr_hi);
1131
        } else {
1132
            tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1133
        }
1134
        return;
1135
    case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1136
        CHECK_FPU_ENABLED
1137
        if (ctx->fpscr & FPSCR_SZ) {
1138
            TCGv addr_hi = tcg_temp_new();
1139
            int fr = XREG(B11_8);
1140
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1141
            tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1142
            tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1143
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1144
            tcg_temp_free(addr_hi);
1145
        } else {
1146
            tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1147
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1148
        }
1149
        return;
1150
    case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1151
        CHECK_FPU_ENABLED
1152
        if (ctx->fpscr & FPSCR_SZ) {
1153
            TCGv addr = tcg_temp_new_i32();
1154
            int fr = XREG(B7_4);
1155
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
1156
            tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1157
            tcg_gen_subi_i32(addr, addr, 4);
1158
            tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1159
            tcg_gen_mov_i32(REG(B11_8), addr);
1160
            tcg_temp_free(addr);
1161
        } else {
1162
            TCGv addr;
1163
            addr = tcg_temp_new_i32();
1164
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
1165
            tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1166
            tcg_gen_mov_i32(REG(B11_8), addr);
1167
            tcg_temp_free(addr);
1168
        }
1169
        return;
1170
    case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1171
        CHECK_FPU_ENABLED
1172
        {
1173
            TCGv addr = tcg_temp_new_i32();
1174
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1175
            if (ctx->fpscr & FPSCR_SZ) {
1176
                int fr = XREG(B11_8);
1177
                tcg_gen_qemu_ld32u(cpu_fregs[fr         ], addr, ctx->memidx);
1178
                tcg_gen_addi_i32(addr, addr, 4);
1179
                tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1180
            } else {
1181
                tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1182
            }
1183
            tcg_temp_free(addr);
1184
        }
1185
        return;
1186
    case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
1187
        CHECK_FPU_ENABLED
1188
        {
1189
            TCGv addr = tcg_temp_new();
1190
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1191
            if (ctx->fpscr & FPSCR_SZ) {
1192
                int fr = XREG(B7_4);
1193
                tcg_gen_qemu_ld32u(cpu_fregs[fr         ], addr, ctx->memidx);
1194
                tcg_gen_addi_i32(addr, addr, 4);
1195
                tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1196
            } else {
1197
                tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1198
            }
1199
            tcg_temp_free(addr);
1200
        }
1201
        return;
1202
    case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1203
    case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1204
    case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1205
    case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1206
    case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1207
    case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1208
        {
1209
            CHECK_FPU_ENABLED
1210
            if (ctx->fpscr & FPSCR_PR) {
1211
                TCGv_i64 fp0, fp1;
1212

    
1213
                if (ctx->opcode & 0x0110)
1214
                    break; /* illegal instruction */
1215
                fp0 = tcg_temp_new_i64();
1216
                fp1 = tcg_temp_new_i64();
1217
                gen_load_fpr64(fp0, DREG(B11_8));
1218
                gen_load_fpr64(fp1, DREG(B7_4));
1219
                switch (ctx->opcode & 0xf00f) {
1220
                case 0xf000:                /* fadd Rm,Rn */
1221
                    gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1);
1222
                    break;
1223
                case 0xf001:                /* fsub Rm,Rn */
1224
                    gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1);
1225
                    break;
1226
                case 0xf002:                /* fmul Rm,Rn */
1227
                    gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1);
1228
                    break;
1229
                case 0xf003:                /* fdiv Rm,Rn */
1230
                    gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1);
1231
                    break;
1232
                case 0xf004:                /* fcmp/eq Rm,Rn */
1233
                    gen_helper_fcmp_eq_DT(cpu_env, fp0, fp1);
1234
                    return;
1235
                case 0xf005:                /* fcmp/gt Rm,Rn */
1236
                    gen_helper_fcmp_gt_DT(cpu_env, fp0, fp1);
1237
                    return;
1238
                }
1239
                gen_store_fpr64(fp0, DREG(B11_8));
1240
                tcg_temp_free_i64(fp0);
1241
                tcg_temp_free_i64(fp1);
1242
            } else {
1243
                switch (ctx->opcode & 0xf00f) {
1244
                case 0xf000:                /* fadd Rm,Rn */
1245
                    gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1246
                                       cpu_fregs[FREG(B11_8)],
1247
                                       cpu_fregs[FREG(B7_4)]);
1248
                    break;
1249
                case 0xf001:                /* fsub Rm,Rn */
1250
                    gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1251
                                       cpu_fregs[FREG(B11_8)],
1252
                                       cpu_fregs[FREG(B7_4)]);
1253
                    break;
1254
                case 0xf002:                /* fmul Rm,Rn */
1255
                    gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1256
                                       cpu_fregs[FREG(B11_8)],
1257
                                       cpu_fregs[FREG(B7_4)]);
1258
                    break;
1259
                case 0xf003:                /* fdiv Rm,Rn */
1260
                    gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1261
                                       cpu_fregs[FREG(B11_8)],
1262
                                       cpu_fregs[FREG(B7_4)]);
1263
                    break;
1264
                case 0xf004:                /* fcmp/eq Rm,Rn */
1265
                    gen_helper_fcmp_eq_FT(cpu_env, cpu_fregs[FREG(B11_8)],
1266
                                          cpu_fregs[FREG(B7_4)]);
1267
                    return;
1268
                case 0xf005:                /* fcmp/gt Rm,Rn */
1269
                    gen_helper_fcmp_gt_FT(cpu_env, cpu_fregs[FREG(B11_8)],
1270
                                          cpu_fregs[FREG(B7_4)]);
1271
                    return;
1272
                }
1273
            }
1274
        }
1275
        return;
1276
    case 0xf00e: /* fmac FR0,RM,Rn */
1277
        {
1278
            CHECK_FPU_ENABLED
1279
            if (ctx->fpscr & FPSCR_PR) {
1280
                break; /* illegal instruction */
1281
            } else {
1282
                gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)], cpu_env,
1283
                                   cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)],
1284
                                   cpu_fregs[FREG(B11_8)]);
1285
                return;
1286
            }
1287
        }
1288
    }
1289

    
1290
    switch (ctx->opcode & 0xff00) {
1291
    case 0xc900:                /* and #imm,R0 */
1292
        tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1293
        return;
1294
    case 0xcd00:                /* and.b #imm,@(R0,GBR) */
1295
        {
1296
            TCGv addr, val;
1297
            addr = tcg_temp_new();
1298
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1299
            val = tcg_temp_new();
1300
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1301
            tcg_gen_andi_i32(val, val, B7_0);
1302
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
1303
            tcg_temp_free(val);
1304
            tcg_temp_free(addr);
1305
        }
1306
        return;
1307
    case 0x8b00:                /* bf label */
1308
        CHECK_NOT_DELAY_SLOT
1309
            gen_conditional_jump(ctx, ctx->pc + 2,
1310
                                 ctx->pc + 4 + B7_0s * 2);
1311
        ctx->bstate = BS_BRANCH;
1312
        return;
1313
    case 0x8f00:                /* bf/s label */
1314
        CHECK_NOT_DELAY_SLOT
1315
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1316
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
1317
        return;
1318
    case 0x8900:                /* bt label */
1319
        CHECK_NOT_DELAY_SLOT
1320
            gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1321
                                 ctx->pc + 2);
1322
        ctx->bstate = BS_BRANCH;
1323
        return;
1324
    case 0x8d00:                /* bt/s label */
1325
        CHECK_NOT_DELAY_SLOT
1326
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1327
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
1328
        return;
1329
    case 0x8800:                /* cmp/eq #imm,R0 */
1330
        gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1331
        return;
1332
    case 0xc400:                /* mov.b @(disp,GBR),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
            tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc500:                /* mov.w @(disp,GBR),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
            tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc600:                /* mov.l @(disp,GBR),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
            tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc000:                /* mov.b R0,@(disp,GBR) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
            tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc100:                /* mov.w R0,@(disp,GBR) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
            tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc200:                /* mov.l R0,@(disp,GBR) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
            tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8000:                /* mov.b R0,@(disp,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
            tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8100:                /* mov.w R0,@(disp,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
            tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8400:                /* mov.b @(disp,Rn),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
            tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8500:                /* mov.w @(disp,Rn),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
            tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc700:                /* mova @(disp,PC),R0 */
        tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
        return;
    case 0xcb00:                /* or #imm,R0 */
        tcg_gen_ori_i32(REG(0), REG(0), B7_0);
        return;
    case 0xcf00:                /* or.b #imm,@(R0,GBR) */
        {
            TCGv addr, val;
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_ori_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        return;
    case 0xc300:                /* trapa #imm */
        {
            TCGv imm;
            CHECK_NOT_DELAY_SLOT
            imm = tcg_const_i32(B7_0);
            gen_helper_trapa(cpu_env, imm);
            tcg_temp_free(imm);
            ctx->bstate = BS_BRANCH;
        }
        return;
    case 0xc800:                /* tst #imm,R0 */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_andi_i32(val, REG(0), B7_0);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
            tcg_temp_free(val);
        }
        return;
    case 0xcc00:                /* tst.b #imm,@(R0,GBR) */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_add_i32(val, REG(0), cpu_gbr);
            tcg_gen_qemu_ld8u(val, val, ctx->memidx);
            tcg_gen_andi_i32(val, val, B7_0);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
            tcg_temp_free(val);
        }
        return;
    case 0xca00:                /* xor #imm,R0 */
        tcg_gen_xori_i32(REG(0), REG(0), B7_0);
        return;
    case 0xce00:                /* xor.b #imm,@(R0,GBR) */
        {
            TCGv addr, val;
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_xori_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        return;
    }

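    /* ldc/stc variants that move data between a general register and the
       banked registers R0_BANK..R7_BANK (ALTREG); all of them are
       privileged.  */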
    switch (ctx->opcode & 0xf08f) {
    case 0x408e:                /* ldc Rm,Rn_BANK */
        CHECK_PRIVILEGED
        tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
        return;
    case 0x4087:                /* ldc.l @Rm+,Rn_BANK */
        CHECK_PRIVILEGED
        tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        return;
    case 0x0082:                /* stc Rm_BANK,Rn */
        CHECK_PRIVILEGED
        tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
        return;
    case 0x4083:                /* stc.l Rm_BANK,@-Rn */
        CHECK_PRIVILEGED
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    }

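    /* Opcodes with a single general register operand encoded in
       bits 11..8 (B11_8).  */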
    switch (ctx->opcode & 0xf0ff) {
    case 0x0023:                /* braf Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0003:                /* bsrf Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x4015:                /* cmp/pl Rn */
        gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
        return;
    case 0x4011:                /* cmp/pz Rn */
        gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
        return;
    case 0x4010:                /* dt Rn */
        tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
        gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
        return;
    case 0x402b:                /* jmp @Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x400b:                /* jsr @Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x400e:                /* ldc Rm,SR */
        CHECK_PRIVILEGED
        tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
        ctx->bstate = BS_STOP;
        return;
    case 0x4007:                /* ldc.l @Rm+,SR */
        CHECK_PRIVILEGED
        {
            TCGv val = tcg_temp_new();
            tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
            tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
            tcg_temp_free(val);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
            ctx->bstate = BS_STOP;
        }
        return;
    case 0x0002:                /* stc SR,Rn */
        CHECK_PRIVILEGED
        tcg_gen_mov_i32(REG(B11_8), cpu_sr);
        return;
    case 0x4003:                /* stc SR,@-Rn */
        CHECK_PRIVILEGED
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
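    /* The LD/ST/LDST macros below expand to the ldc/lds and stc/sts cases
       for one control or system register: the plain register-to-register
       form plus the post-increment load and pre-decrement store forms,
       with an optional privilege or FPU check prepended to each case.  */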
#define LD(reg,ldnum,ldpnum,prechk)                \
  case ldnum:                                                        \
    prechk                                                            \
    tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));                        \
    return;                                                        \
  case ldpnum:                                                        \
    prechk                                                            \
    tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);        \
    tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);                \
    return;
#define ST(reg,stnum,stpnum,prechk)                \
  case stnum:                                                        \
    prechk                                                            \
    tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);                        \
    return;                                                        \
  case stpnum:                                                        \
    prechk                                                            \
    {                                                                \
        TCGv addr = tcg_temp_new();                                \
        tcg_gen_subi_i32(addr, REG(B11_8), 4);                        \
        tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);        \
        tcg_gen_mov_i32(REG(B11_8), addr);                        \
        tcg_temp_free(addr);                                        \
    }                                                                \
    return;
#define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)                \
        LD(reg,ldnum,ldpnum,prechk)                                \
        ST(reg,stnum,stpnum,prechk)
        LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
        LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
        LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
        LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
        ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
        LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
        LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
        LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
        LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
        LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
        LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
    case 0x406a:                /* lds Rm,FPSCR */
        CHECK_FPU_ENABLED
        gen_helper_ld_fpscr(cpu_env, REG(B11_8));
        ctx->bstate = BS_STOP;
        return;
    case 0x4066:                /* lds.l @Rm+,FPSCR */
        CHECK_FPU_ENABLED
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
            gen_helper_ld_fpscr(cpu_env, addr);
            tcg_temp_free(addr);
            ctx->bstate = BS_STOP;
        }
        return;
    case 0x006a:                /* sts FPSCR,Rn */
        CHECK_FPU_ENABLED
        tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
        return;
    case 0x4062:                /* sts FPSCR,@-Rn */
        CHECK_FPU_ENABLED
        {
            TCGv addr, val;
            val = tcg_temp_new();
            tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
            addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(val, addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
            tcg_temp_free(val);
        }
        return;
    case 0x00c3:                /* movca.l R0,@Rm */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
            gen_helper_movcal(cpu_env, REG(B11_8), val);
            tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
        }
        ctx->has_movcal = 1;
        return;
    case 0x40a9:
        /* MOVUA.L @Rm,R0 (Rm) -> R0
           Load non-boundary-aligned data */
        tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
        return;
    case 0x40e9:
        /* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
           Load non-boundary-aligned data */
        tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        return;
    case 0x0029:                /* movt Rn */
        tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
        return;
    case 0x0073:
        /* MOVCO.L
               LDST -> T
               If (T == 1) R0 -> (Rn)
               0 -> LDST
        */
        if (ctx->features & SH_FEATURE_SH4A) {
            int label = gen_new_label();
            gen_clr_t();
            tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
            tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
            tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
            gen_set_label(label);
            tcg_gen_movi_i32(cpu_ldst, 0);
            return;
        } else
            break;
    case 0x0063:
        /* MOVLI.L @Rm,R0
               1 -> LDST
               (Rm) -> R0
               When interrupt/exception
               occurred 0 -> LDST
        */
        if (ctx->features & SH_FEATURE_SH4A) {
            tcg_gen_movi_i32(cpu_ldst, 0);
            tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
            tcg_gen_movi_i32(cpu_ldst, 1);
            return;
        } else
            break;
    case 0x0093:                /* ocbi @Rn */
        {
            gen_helper_ocbi(cpu_env, REG(B11_8));
        }
        return;
    case 0x00a3:                /* ocbp @Rn */
    case 0x00b3:                /* ocbwb @Rn */
        /* These instructions are supposed to do nothing in case of
           a cache miss. Given that we only partially emulate caches
           it is safe to simply ignore them. */
        return;
    case 0x0083:                /* pref @Rn */
        return;
    case 0x00d3:                /* prefi @Rn */
        if (ctx->features & SH_FEATURE_SH4A)
            return;
        else
            break;
    case 0x00e3:                /* icbi @Rn */
        if (ctx->features & SH_FEATURE_SH4A)
            return;
        else
            break;
    case 0x00ab:                /* synco */
        if (ctx->features & SH_FEATURE_SH4A)
            return;
        else
            break;
    case 0x4024:                /* rotcl Rn */
        {
            TCGv tmp = tcg_temp_new();
            tcg_gen_mov_i32(tmp, cpu_sr);
            gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
            tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
            gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
            tcg_temp_free(tmp);
        }
        return;
    case 0x4025:                /* rotcr Rn */
        {
            TCGv tmp = tcg_temp_new();
            tcg_gen_mov_i32(tmp, cpu_sr);
            gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
            tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
            gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
            tcg_temp_free(tmp);
        }
        return;
    case 0x4004:                /* rotl Rn */
        tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        return;
    case 0x4005:                /* rotr Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4000:                /* shll Rn */
    case 0x4020:                /* shal Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4021:                /* shar Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4001:                /* shlr Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4008:                /* shll2 Rn */
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
        return;
    case 0x4018:                /* shll8 Rn */
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
        return;
    case 0x4028:                /* shll16 Rn */
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
        return;
    case 0x4009:                /* shlr2 Rn */
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
        return;
    case 0x4019:                /* shlr8 Rn */
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
        return;
    case 0x4029:                /* shlr16 Rn */
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
        return;
    case 0x401b:                /* tas.b @Rn */
        {
            TCGv addr, val;
            addr = tcg_temp_local_new();
            tcg_gen_mov_i32(addr, REG(B11_8));
            val = tcg_temp_local_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
            tcg_gen_ori_i32(val, val, 0x80);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        return;
    case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
        return;
    case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
        return;
    case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
        CHECK_FPU_ENABLED
        if (ctx->fpscr & FPSCR_PR) {
            TCGv_i64 fp;
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            fp = tcg_temp_new_i64();
            gen_helper_float_DT(fp, cpu_env, cpu_fpul);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        }
        else {
            gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_env, cpu_fpul);
        }
        return;
    case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        CHECK_FPU_ENABLED
        if (ctx->fpscr & FPSCR_PR) {
            TCGv_i64 fp;
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp);
            tcg_temp_free_i64(fp);
        }
        else {
            gen_helper_ftrc_FT(cpu_fpul, cpu_env, cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
        CHECK_FPU_ENABLED
        {
            gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf05d: /* fabs FRn/DRn */
        CHECK_FPU_ENABLED
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fabs_DT(fp, fp);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf06d: /* fsqrt FRn */
        CHECK_FPU_ENABLED
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fsqrt_DT(fp, cpu_env, fp);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_env,
                                cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf07d: /* fsrra FRn */
        CHECK_FPU_ENABLED
        break;
    case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
        CHECK_FPU_ENABLED
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
        }
        return;
    case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
        CHECK_FPU_ENABLED
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
        }
        return;
    case 0xf0ad: /* fcnvsd FPUL,DRn */
        CHECK_FPU_ENABLED
        {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        }
        return;
    case 0xf0bd: /* fcnvds DRn,FPUL */
        CHECK_FPU_ENABLED
        {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp);
            tcg_temp_free_i64(fp);
        }
        return;
    case 0xf0ed: /* fipr FVm,FVn */
        CHECK_FPU_ENABLED
        if ((ctx->fpscr & FPSCR_PR) == 0) {
            TCGv m, n;
            m = tcg_const_i32((ctx->opcode >> 8) & 3);
            n = tcg_const_i32((ctx->opcode >> 10) & 3);
            gen_helper_fipr(cpu_env, m, n);
            tcg_temp_free(m);
            tcg_temp_free(n);
            return;
        }
        break;
    case 0xf0fd: /* ftrv XMTRX,FVn */
        CHECK_FPU_ENABLED
        if ((ctx->opcode & 0x0300) == 0x0100 &&
            (ctx->fpscr & FPSCR_PR) == 0) {
            TCGv n;
            n = tcg_const_i32((ctx->opcode >> 10) & 3);
            gen_helper_ftrv(cpu_env, n);
            tcg_temp_free(n);
            return;
        }
        break;
    }
#if 0
    fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
            ctx->opcode, ctx->pc);
    fflush(stderr);
#endif
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        gen_helper_raise_slot_illegal_instruction(cpu_env);
    } else {
        gen_helper_raise_illegal_instruction(cpu_env);
    }
    ctx->bstate = BS_EXCP;
}

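/* Translate a single instruction.  If the previous instruction opened a
   delay slot, the flags are flushed and the pending (possibly conditional)
   branch is emitted right after the slot instruction.  */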
static void decode_opc(DisasContext * ctx)
{
    uint32_t old_flags = ctx->flags;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
        tcg_gen_debug_insn_start(ctx->pc);
    }

    _decode_opc(ctx);

    if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        if (ctx->flags & DELAY_SLOT_CLEARME) {
            gen_store_flags(0);
        } else {
            /* go out of the delay slot */
            uint32_t new_flags = ctx->flags;
            new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
            gen_store_flags(new_flags);
        }
        ctx->flags = 0;
        ctx->bstate = BS_BRANCH;
        if (old_flags & DELAY_SLOT_CONDITIONAL) {
            gen_delayed_conditional_jump(ctx);
        } else if (old_flags & DELAY_SLOT) {
            gen_jump(ctx);
        }

    }

    /* go into a delay slot */
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
        gen_store_flags(ctx->flags);
}

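/* Translate a block of code starting at tb->pc.  When search_pc is set, the
   PC, flags and instruction count are also recorded for each generated op so
   that restore_state_to_opc() can rebuild the CPU state when an exception is
   raised in the middle of the TB.  */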
static inline void
gen_intermediate_code_internal(CPUSH4State * env, TranslationBlock * tb,
                               int search_pc)
{
    DisasContext ctx;
    target_ulong pc_start;
    static uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int i, ii;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.flags = (uint32_t)tb->flags;
    ctx.bstate = BS_NONE;
    ctx.sr = env->sr;
    ctx.fpscr = env->fpscr;
    ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx.delayed_pc = -1; /* use delayed pc from env pointer */
    ctx.tb = tb;
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.features = env->features;
    ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);

    ii = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (ctx.pc == bp->pc) {
                    /* We have hit a breakpoint - make sure PC is up-to-date */
                    tcg_gen_movi_i32(cpu_pc, ctx.pc);
                    gen_helper_debug(cpu_env);
                    ctx.bstate = BS_EXCP;
                    break;
                }
            }
        }
        if (search_pc) {
            i = gen_opc_ptr - gen_opc_buf;
            if (ii < i) {
                ii++;
                while (ii < i)
                    gen_opc_instr_start[ii++] = 0;
            }
            gen_opc_pc[ii] = ctx.pc;
            gen_opc_hflags[ii] = ctx.flags;
            gen_opc_instr_start[ii] = 1;
            gen_opc_icount[ii] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if 0
        fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
        fflush(stderr);
#endif
        ctx.opcode = cpu_lduw_code(env, ctx.pc);
        decode_opc(&ctx);
        num_insns++;
        ctx.pc += 2;
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (env->singlestep_enabled)
            break;
        if (num_insns >= max_insns)
            break;
        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (env->singlestep_enabled) {
        tcg_gen_movi_i32(cpu_pc, ctx.pc);
        gen_helper_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            /* gen_op_interrupt_restart(); */
            /* fall through */
        case BS_NONE:
            if (ctx.flags) {
                gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
            }
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            /* gen_op_interrupt_restart(); */
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        i = gen_opc_ptr - gen_opc_buf;
        ii++;
        while (ii <= i)
            gen_opc_instr_start[ii++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
#ifdef SH4_DEBUG_DISAS
    qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
#endif
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN:\n");        /* , lookup_symbol(pc_start)); */
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
{
    env->pc = gen_opc_pc[pc_pos];
    env->flags = gen_opc_hflags[pc_pos];
}