Statistics
| Branch: | Revision:

root / target-alpha / translate.c @ 14ab1634

History | View | Annotate | Download (80 kB)

1
/*
2
 *  Alpha emulation cpu translation for qemu.
3
 *
4
 *  Copyright (c) 2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18
 */
19

    
20
#include <stdint.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23

    
24
#include "cpu.h"
25
#include "exec-all.h"
26
#include "disas.h"
27
#include "host-utils.h"
28
#include "tcg-op.h"
29
#include "qemu-common.h"
30

    
31
#include "helper.h"
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
/* Define ALPHA_DEBUG_DISAS to route per-instruction decode traces into the
   CPU_LOG_TB_IN_ASM log stream; when it is left undefined, LOG_DISAS
   expands to a no-op statement so release builds pay no cost.  */
#undef ALPHA_DEBUG_DISAS

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
42

    
43
/* Per-translation state threaded through the instruction decoder.  */
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;            /* virtual PC of the insn being translated */
    int mem_idx;            /* MMU index passed to the qemu_ld/st ops */
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;           /* NOTE(review): presumably nonzero while
                               translating PALcode — confirm at callers */
#endif
    CPUAlphaState *env;
    uint32_t amask;         /* cached AMASK feature bits (e.g. AMASK_BWX),
                               checked before BWX-only opcodes */
};
53

    
54
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];     /* integer regs; $31 (always zero) has no global */
static TCGv cpu_fir[31];    /* FP regs; $f31 likewise special-cased to zero */
static TCGv cpu_pc;
static TCGv cpu_lock;       /* address armed by LDx_L, tested by STx_C;
                               reset to -1 after a store-conditional */
#ifdef CONFIG_USER_ONLY
static TCGv cpu_uniq;       /* process "unique" value (RDUNIQUE/WRUNIQUE) */
#endif

/* register names */
/* Backing storage for the TCG global names: "ir0".."ir30" take
   10*4 + 21*5 bytes including NULs, "fir0".."fir30" 10*5 + 21*6.  */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
66

    
67
#include "gen-icount.h"
68

    
69
/* Create the TCG globals backing the architectural registers and register
   the helper functions.  Idempotent: guarded by done_init so repeated CPU
   creation reuses the process-wide globals.  */
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;      /* strlen("irN") + 1 */

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;      /* strlen("firN") + 1 */
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

#ifdef CONFIG_USER_ONLY
    cpu_uniq = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, unique), "uniq");
#endif

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
110

    
111
/* Raise EXCEPTION with ERROR_CODE.  The architectural PC is synced to
   env first so the exception handler observes the faulting insn's PC.  */
static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}
122

    
123
/* Raise the illegal-opcode exception for an undecodable instruction.  */
static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
127

    
128
/* LDF: load a 32-bit longword and expand it to the 64-bit VAX F-float
   register format via the memory_to_f helper.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
138

    
139
/* LDG: load a 64-bit quadword and convert the VAX G-float memory format
   to the register format via the memory_to_g helper.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}
146

    
147
/* LDS: load a 32-bit longword and expand IEEE single to the 64-bit
   register format via the memory_to_s helper.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
157

    
158
/* LDL_L: load-locked longword — record the locked address in cpu_lock
   (checked later by STL_C), then perform a sign-extending 32-bit load.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}
163

    
164
/* LDQ_L: load-locked quadword — record the locked address, then load.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
169

    
170
/* Emit a memory load of the form  Rx/Fx = *(Rb + disp16).
   FP selects the FP register file; CLEAR masks the low 3 address bits
   (the LDQ_U unaligned-access form).  A load whose destination is $31
   is dropped entirely — NOTE(review): this also skips any prefetch
   side effect such an encoding might imply; confirm that is intended.  */
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        /* Base register is $31: the address is just the displacement.  */
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
197

    
198
/* STF: convert the 64-bit F-float register format back to the 32-bit
   memory longword via the f_to_memory helper, then store it.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
208

    
209
/* STG: convert register-format G-float to memory format and store 64 bits.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}
216

    
217
/* STS: narrow the 64-bit register format to an IEEE single memory
   longword via the s_to_memory helper, then store 32 bits.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
227

    
228
/* STL_C: store-conditional longword.  The store succeeds only when the
   address still matches cpu_lock (armed by LDx_L); t0 is overwritten
   with 1 on success, 0 on failure, and the lock is cleared to -1
   either way.  */
static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
243

    
244
/* STQ_C: store-conditional quadword; same protocol as gen_qemu_stl_c —
   t0 becomes the 1/0 success flag and cpu_lock is reset to -1.  */
static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
259

    
260
/* Emit a memory store of the form  *(Rb + disp16) = Rx/Fx.
   FP selects the FP register file; CLEAR masks the low 3 address bits
   (STQ_U form).  Storing $31/$f31 stores zero.  LOCAL requests local
   temps — needed when the store callback itself emits branches (the
   store-conditional forms), since plain temps do not survive brcond.  */
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        /* Source is the zero register: store an explicit zero.  */
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
296

    
297
/* Conditional branch on integer register RA.  cpu_pc is written on both
   paths: fall-through PC when the condition fails, PC + 4*disp when it
   holds.  MASK tests only bit 0 of RA (the BLBC/BLBS-style forms);
   otherwise COND is applied to the whole register against zero.  */
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* disp is a longword count; scale to bytes before adding.  */
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
324

    
325
/* Floating-point conditional branch: evaluate the FBxx predicate on
   F-register RA via the matching cmpf* helper, then update cpu_pc to
   either the fall-through or the target (PC + 4*disp) address.
   NOTE(review): when ra != 31, tmp is never tcg_temp_free'd — the temp
   leaks for the remainder of this TB.  */
static inline void gen_fbcond(DisasContext *ctx, int opc, int ra, int32_t disp)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else  {
        /* $f31 reads as zero; reuse the constant as both src and dest.  */
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
369

    
370
/* Integer conditional move: RC = (islit ? lit : RB) unless INV_COND
   holds on RA.  INV_COND is the *inverted* condition — it branches past
   the move when the architectural condition is false.  MASK tests only
   bit 0 of RA (CMOVLBC/CMOVLBS forms).  */
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
401

    
402
/* Emit a unary FP operation  Fc = helper(Fb)  via gen_helper_<name>.
   A write to $f31 is dropped; $f31 as source reads as zero.  */
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
      return;                                               \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);    \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);            \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)
433

    
434
/* Emit a binary FP operation  Fc = helper(Fa, Fb).  A write to $f31 is
   dropped; $f31 as either source is substituted with a zero constant.  */
#define FARITH3(name)                                                     \
static inline void glue(gen_f, name)(int ra, int rb, int rc)              \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                   \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}

FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)
484

    
485
#define FCMOV(name)                                                   \
486
static inline void glue(gen_f, name)(int ra, int rb, int rc)          \
487
{                                                                     \
488
    int l1;                                                           \
489
    TCGv tmp;                                                         \
490
                                                                      \
491
    if (unlikely(rc == 31))                                           \
492
        return;                                                       \
493
                                                                      \
494
    l1 = gen_new_label();                                             \
495
    tmp = tcg_temp_new();                                 \
496
    if (ra != 31) {                                                   \
497
        tmp = tcg_temp_new();                             \
498
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
499
    } else  {                                                         \
500
        tmp = tcg_const_i64(0);                                       \
501
        gen_helper_ ## name (tmp, tmp);                               \
502
    }                                                                 \
503
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
504
    if (rb != 31)                                                     \
505
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);                    \
506
    else                                                              \
507
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
508
    gen_set_label(l1);                                                \
509
}
510
FCMOV(cmpfeq)
511
FCMOV(cmpfne)
512
FCMOV(cmpflt)
513
FCMOV(cmpfge)
514
FCMOV(cmpfle)
515
FCMOV(cmpfgt)
516

    
517
/* Expand the 8-bit ZAPNOT byte selector LIT into a 64-bit mask:
   byte lane I of the result is all-ones exactly when bit I of LIT
   is set, all-zeros otherwise.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int byte;

    /* Assemble from the most-significant lane down, shifting the
       accumulated mask up one byte per step.  */
    for (byte = 7; byte >= 0; --byte) {
        mask <<= 8;
        if (lit & (1 << byte))
            mask |= 0xff;
    }
    return mask;
}
528

    
529
/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        /* No bytes selected: result is zero.  */
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        /* Keep byte 0 only == zero-extend from 8 bits.  */
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        /* Bytes 0-1 == zero-extend from 16 bits.  */
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        /* Bytes 0-3 == zero-extend from 32 bits.  */
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        /* All bytes kept: plain move.  */
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64 (dest, src, zapnot_mask (lit));
        break;
    }
}
555

    
556
/* ZAPNOT: keep the bytes of RA selected by the literal (or by the low
   byte of RB via the helper when the operand is a register).  */
static inline void gen_zapnot(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit)
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], lit);
    else
        gen_helper_zapnot (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
}
567

    
568
/* ZAP: clear the selected bytes of RA — the complement of ZAPNOT, so
   the literal form just inverts the selector.  */
static inline void gen_zap(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit)
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], ~lit);
    else
        gen_helper_zap (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
}
579

    
580

    
581
/* EXTWH, EXTLH, EXTQH */
/* Extract-high: shift RA left by (64 - 8*(shift&7)) bits, then zap to
   BYTE_MASK.  The shift amount comes from the literal or from the low
   3 bits of RB.  */
static inline void gen_ext_h(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            /* & 0x3f wraps 64 back to 0 so a zero byte offset shifts by 0.  */
            lit = (64 - (lit & 7) * 8) & 0x3f;
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        } else {
            /* Compute (64 - 8*(rb&7)) mod 64 as (-8*(rb&7)) & 0x3f.  */
            TCGv tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
605

    
606
/* EXTBL, EXTWL, EXTLL, EXTQL */
/* Extract-low: shift RA right by 8*(shift&7) bits, then zap to
   BYTE_MASK.  The shift amount comes from the literal or from the low
   3 bits of RB.  */
static inline void gen_ext_l(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
627

    
628
/* INSBL, INSWL, INSLL, INSQL */
/* Insert-low: zap RA to BYTE_MASK, then shift left by 8*(shift&7).  */
static inline void gen_ins_l(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           the same number of byte slots as the data and apply the zap
           at the end.  This is equivalent to simply performing the zap
           first and shifting afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            tcg_gen_shli_i64(cpu_ir[rc], tmp, (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();
            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_shl_i64(cpu_ir[rc], tmp, shift);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
657

    
658
/* MSKBL, MSKWL, MSKLL, MSKQL */
/* Mask-low: clear the bytes of RA covered by BYTE_MASK shifted up by
   the byte offset (literal, or low 3 bits of RB).  */
static inline void gen_msk_l(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        /* Constant offset: fold the whole mask into a single zapnot.  */
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* Build the bit mask at runtime and and-not it out of RA.  */
        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
683

    
684
/* Code to call arith3 helpers */
/* Emit  Rc = helper(Ra, Rb-or-literal).  A write to $31 is dropped and
   $31 as the first operand is substituted with a zero constant.  */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
ARITH3(minub8)
ARITH3(minsb8)
ARITH3(minuw4)
ARITH3(minsw4)
ARITH3(maxub8)
ARITH3(maxsb8)
ARITH3(maxuw4)
ARITH3(maxsw4)
ARITH3(perr)

    
734
/* Emit a unary MVI operation  Rc = helper(Rb); $31 as source yields
   zero directly, a write to $31 is dropped.  */
#define MVIOP2(name)                                    \
static inline void glue(gen_, name)(int rb, int rc)     \
{                                                       \
    if (unlikely(rc == 31))                             \
        return;                                         \
    if (unlikely(rb == 31))                             \
        tcg_gen_movi_i64(cpu_ir[rc], 0);                \
    else                                                \
        gen_helper_ ## name (cpu_ir[rc], cpu_ir[rb]);   \
}
MVIOP2(pklb)
MVIOP2(pkwb)
MVIOP2(unpkbl)
MVIOP2(unpkbw)
748

    
749
/* Integer compare: RC = (COND holds between RA and RB-or-literal) ? 1 : 0.
   NOTE(review): tmp (a temp or const copy of RA) is never freed here —
   it leaks for the remainder of the TB.  */
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
                           uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    /* Copy RA so the branch below cannot be confused by rc == ra.  */
    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
777

    
778
static inline int translate_one(DisasContext *ctx, uint32_t insn)
779
{
780
    uint32_t palcode;
781
    int32_t disp21, disp16, disp12;
782
    uint16_t fn11, fn16;
783
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit, real_islit;
784
    uint8_t lit;
785
    int ret;
786

    
787
    /* Decode all instruction fields */
788
    opc = insn >> 26;
789
    ra = (insn >> 21) & 0x1F;
790
    rb = (insn >> 16) & 0x1F;
791
    rc = insn & 0x1F;
792
    sbz = (insn >> 13) & 0x07;
793
    real_islit = islit = (insn >> 12) & 1;
794
    if (rb == 31 && !islit) {
795
        islit = 1;
796
        lit = 0;
797
    } else
798
        lit = (insn >> 13) & 0xFF;
799
    palcode = insn & 0x03FFFFFF;
800
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
801
    disp16 = (int16_t)(insn & 0x0000FFFF);
802
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
803
    fn16 = insn & 0x0000FFFF;
804
    fn11 = (insn >> 5) & 0x000007FF;
805
    fpfn = fn11 & 0x3F;
806
    fn7 = (insn >> 5) & 0x0000007F;
807
    fn2 = (insn >> 5) & 0x00000003;
808
    ret = 0;
809
    LOG_DISAS("opc %02x ra %2d rb %2d rc %2d disp16 %6d\n",
810
              opc, ra, rb, rc, disp16);
811

    
812
    switch (opc) {
813
    case 0x00:
814
        /* CALL_PAL */
815
#ifdef CONFIG_USER_ONLY
816
        if (palcode == 0x9E) {
817
            /* RDUNIQUE */
818
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_uniq);
819
            break;
820
        } else if (palcode == 0x9F) {
821
            /* WRUNIQUE */
822
            tcg_gen_mov_i64(cpu_uniq, cpu_ir[IR_A0]);
823
            break;
824
        }
825
#endif
826
        if (palcode >= 0x80 && palcode < 0xC0) {
827
            /* Unprivileged PAL call */
828
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
829
            ret = 3;
830
            break;
831
        }
832
#ifndef CONFIG_USER_ONLY
833
        if (palcode < 0x40) {
834
            /* Privileged PAL code */
835
            if (ctx->mem_idx & 1)
836
                goto invalid_opc;
837
            gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
838
            ret = 3;
839
        }
840
#endif
841
        /* Invalid PAL call */
842
        goto invalid_opc;
843
    case 0x01:
844
        /* OPC01 */
845
        goto invalid_opc;
846
    case 0x02:
847
        /* OPC02 */
848
        goto invalid_opc;
849
    case 0x03:
850
        /* OPC03 */
851
        goto invalid_opc;
852
    case 0x04:
853
        /* OPC04 */
854
        goto invalid_opc;
855
    case 0x05:
856
        /* OPC05 */
857
        goto invalid_opc;
858
    case 0x06:
859
        /* OPC06 */
860
        goto invalid_opc;
861
    case 0x07:
862
        /* OPC07 */
863
        goto invalid_opc;
864
    case 0x08:
865
        /* LDA */
866
        if (likely(ra != 31)) {
867
            if (rb != 31)
868
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
869
            else
870
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
871
        }
872
        break;
873
    case 0x09:
874
        /* LDAH */
875
        if (likely(ra != 31)) {
876
            if (rb != 31)
877
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
878
            else
879
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
880
        }
881
        break;
882
    case 0x0A:
883
        /* LDBU */
884
        if (!(ctx->amask & AMASK_BWX))
885
            goto invalid_opc;
886
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
887
        break;
888
    case 0x0B:
889
        /* LDQ_U */
890
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
891
        break;
892
    case 0x0C:
893
        /* LDWU */
894
        if (!(ctx->amask & AMASK_BWX))
895
            goto invalid_opc;
896
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
897
        break;
898
    case 0x0D:
899
        /* STW */
900
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
901
        break;
902
    case 0x0E:
903
        /* STB */
904
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
905
        break;
906
    case 0x0F:
907
        /* STQ_U */
908
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
909
        break;
910
    case 0x10:
911
        switch (fn7) {
912
        case 0x00:
913
            /* ADDL */
914
            if (likely(rc != 31)) {
915
                if (ra != 31) {
916
                    if (islit) {
917
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
918
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
919
                    } else {
920
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
921
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
922
                    }
923
                } else {
924
                    if (islit)
925
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
926
                    else
927
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
928
                }
929
            }
930
            break;
931
        case 0x02:
932
            /* S4ADDL */
933
            if (likely(rc != 31)) {
934
                if (ra != 31) {
935
                    TCGv tmp = tcg_temp_new();
936
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
937
                    if (islit)
938
                        tcg_gen_addi_i64(tmp, tmp, lit);
939
                    else
940
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
941
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
942
                    tcg_temp_free(tmp);
943
                } else {
944
                    if (islit)
945
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
946
                    else
947
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
948
                }
949
            }
950
            break;
951
        case 0x09:
952
            /* SUBL */
953
            if (likely(rc != 31)) {
954
                if (ra != 31) {
955
                    if (islit)
956
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
957
                    else
958
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
959
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
960
                } else {
961
                    if (islit)
962
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
963
                    else {
964
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
965
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
966
                }
967
            }
968
            break;
969
        case 0x0B:
970
            /* S4SUBL */
971
            if (likely(rc != 31)) {
972
                if (ra != 31) {
973
                    TCGv tmp = tcg_temp_new();
974
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
975
                    if (islit)
976
                        tcg_gen_subi_i64(tmp, tmp, lit);
977
                    else
978
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
979
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
980
                    tcg_temp_free(tmp);
981
                } else {
982
                    if (islit)
983
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
984
                    else {
985
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
986
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
987
                    }
988
                }
989
            }
990
            break;
991
        case 0x0F:
992
            /* CMPBGE */
993
            gen_cmpbge(ra, rb, rc, islit, lit);
994
            break;
995
        case 0x12:
996
            /* S8ADDL */
997
            if (likely(rc != 31)) {
998
                if (ra != 31) {
999
                    TCGv tmp = tcg_temp_new();
1000
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1001
                    if (islit)
1002
                        tcg_gen_addi_i64(tmp, tmp, lit);
1003
                    else
1004
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
1005
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1006
                    tcg_temp_free(tmp);
1007
                } else {
1008
                    if (islit)
1009
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1010
                    else
1011
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1012
                }
1013
            }
1014
            break;
1015
        case 0x1B:
1016
            /* S8SUBL */
1017
            if (likely(rc != 31)) {
1018
                if (ra != 31) {
1019
                    TCGv tmp = tcg_temp_new();
1020
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1021
                    if (islit)
1022
                        tcg_gen_subi_i64(tmp, tmp, lit);
1023
                    else
1024
                       tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
1025
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1026
                    tcg_temp_free(tmp);
1027
                } else {
1028
                    if (islit)
1029
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1030
                    else
1031
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1032
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1033
                    }
1034
                }
1035
            }
1036
            break;
1037
        case 0x1D:
1038
            /* CMPULT */
1039
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
1040
            break;
1041
        case 0x20:
1042
            /* ADDQ */
1043
            if (likely(rc != 31)) {
1044
                if (ra != 31) {
1045
                    if (islit)
1046
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1047
                    else
1048
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1049
                } else {
1050
                    if (islit)
1051
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1052
                    else
1053
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1054
                }
1055
            }
1056
            break;
1057
        case 0x22:
1058
            /* S4ADDQ */
1059
            if (likely(rc != 31)) {
1060
                if (ra != 31) {
1061
                    TCGv tmp = tcg_temp_new();
1062
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1063
                    if (islit)
1064
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1065
                    else
1066
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1067
                    tcg_temp_free(tmp);
1068
                } else {
1069
                    if (islit)
1070
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1071
                    else
1072
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1073
                }
1074
            }
1075
            break;
1076
        case 0x29:
1077
            /* SUBQ */
1078
            if (likely(rc != 31)) {
1079
                if (ra != 31) {
1080
                    if (islit)
1081
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1082
                    else
1083
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1084
                } else {
1085
                    if (islit)
1086
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1087
                    else
1088
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1089
                }
1090
            }
1091
            break;
1092
        case 0x2B:
1093
            /* S4SUBQ */
1094
            if (likely(rc != 31)) {
1095
                if (ra != 31) {
1096
                    TCGv tmp = tcg_temp_new();
1097
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1098
                    if (islit)
1099
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1100
                    else
1101
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1102
                    tcg_temp_free(tmp);
1103
                } else {
1104
                    if (islit)
1105
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1106
                    else
1107
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1108
                }
1109
            }
1110
            break;
1111
        case 0x2D:
1112
            /* CMPEQ */
1113
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
1114
            break;
1115
        case 0x32:
1116
            /* S8ADDQ */
1117
            if (likely(rc != 31)) {
1118
                if (ra != 31) {
1119
                    TCGv tmp = tcg_temp_new();
1120
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1121
                    if (islit)
1122
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1123
                    else
1124
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1125
                    tcg_temp_free(tmp);
1126
                } else {
1127
                    if (islit)
1128
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1129
                    else
1130
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1131
                }
1132
            }
1133
            break;
1134
        case 0x3B:
1135
            /* S8SUBQ */
1136
            if (likely(rc != 31)) {
1137
                if (ra != 31) {
1138
                    TCGv tmp = tcg_temp_new();
1139
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1140
                    if (islit)
1141
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1142
                    else
1143
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1144
                    tcg_temp_free(tmp);
1145
                } else {
1146
                    if (islit)
1147
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1148
                    else
1149
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1150
                }
1151
            }
1152
            break;
1153
        case 0x3D:
1154
            /* CMPULE */
1155
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1156
            break;
1157
        case 0x40:
1158
            /* ADDL/V */
1159
            gen_addlv(ra, rb, rc, islit, lit);
1160
            break;
1161
        case 0x49:
1162
            /* SUBL/V */
1163
            gen_sublv(ra, rb, rc, islit, lit);
1164
            break;
1165
        case 0x4D:
1166
            /* CMPLT */
1167
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1168
            break;
1169
        case 0x60:
1170
            /* ADDQ/V */
1171
            gen_addqv(ra, rb, rc, islit, lit);
1172
            break;
1173
        case 0x69:
1174
            /* SUBQ/V */
1175
            gen_subqv(ra, rb, rc, islit, lit);
1176
            break;
1177
        case 0x6D:
1178
            /* CMPLE */
1179
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1180
            break;
1181
        default:
1182
            goto invalid_opc;
1183
        }
1184
        break;
1185
    case 0x11:
1186
        switch (fn7) {
1187
        case 0x00:
1188
            /* AND */
1189
            if (likely(rc != 31)) {
1190
                if (ra == 31)
1191
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1192
                else if (islit)
1193
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1194
                else
1195
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1196
            }
1197
            break;
1198
        case 0x08:
1199
            /* BIC */
1200
            if (likely(rc != 31)) {
1201
                if (ra != 31) {
1202
                    if (islit)
1203
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1204
                    else
1205
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1206
                } else
1207
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1208
            }
1209
            break;
1210
        case 0x14:
1211
            /* CMOVLBS */
1212
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1213
            break;
1214
        case 0x16:
1215
            /* CMOVLBC */
1216
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1217
            break;
1218
        case 0x20:
1219
            /* BIS */
1220
            if (likely(rc != 31)) {
1221
                if (ra != 31) {
1222
                    if (islit)
1223
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1224
                    else
1225
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1226
                } else {
1227
                    if (islit)
1228
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1229
                    else
1230
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1231
                }
1232
            }
1233
            break;
1234
        case 0x24:
1235
            /* CMOVEQ */
1236
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1237
            break;
1238
        case 0x26:
1239
            /* CMOVNE */
1240
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1241
            break;
1242
        case 0x28:
1243
            /* ORNOT */
1244
            if (likely(rc != 31)) {
1245
                if (ra != 31) {
1246
                    if (islit)
1247
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1248
                    else
1249
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1250
                } else {
1251
                    if (islit)
1252
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1253
                    else
1254
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1255
                }
1256
            }
1257
            break;
1258
        case 0x40:
1259
            /* XOR */
1260
            if (likely(rc != 31)) {
1261
                if (ra != 31) {
1262
                    if (islit)
1263
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1264
                    else
1265
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1266
                } else {
1267
                    if (islit)
1268
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1269
                    else
1270
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1271
                }
1272
            }
1273
            break;
1274
        case 0x44:
1275
            /* CMOVLT */
1276
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1277
            break;
1278
        case 0x46:
1279
            /* CMOVGE */
1280
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1281
            break;
1282
        case 0x48:
1283
            /* EQV */
1284
            if (likely(rc != 31)) {
1285
                if (ra != 31) {
1286
                    if (islit)
1287
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1288
                    else
1289
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1290
                } else {
1291
                    if (islit)
1292
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1293
                    else
1294
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1295
                }
1296
            }
1297
            break;
1298
        case 0x61:
1299
            /* AMASK */
1300
            if (likely(rc != 31)) {
1301
                if (islit)
1302
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
1303
                else
1304
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1305
                switch (ctx->env->implver) {
1306
                case IMPLVER_2106x:
1307
                    /* EV4, EV45, LCA, LCA45 & EV5 */
1308
                    break;
1309
                case IMPLVER_21164:
1310
                case IMPLVER_21264:
1311
                case IMPLVER_21364:
1312
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
1313
                                     ~(uint64_t)ctx->amask);
1314
                    break;
1315
                }
1316
            }
1317
            break;
1318
        case 0x64:
1319
            /* CMOVLE */
1320
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1321
            break;
1322
        case 0x66:
1323
            /* CMOVGT */
1324
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1325
            break;
1326
        case 0x6C:
1327
            /* IMPLVER */
1328
            if (rc != 31)
1329
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
1330
            break;
1331
        default:
1332
            goto invalid_opc;
1333
        }
1334
        break;
1335
    case 0x12:
1336
        switch (fn7) {
1337
        case 0x02:
1338
            /* MSKBL */
1339
            gen_msk_l(ra, rb, rc, islit, lit, 0x01);
1340
            break;
1341
        case 0x06:
1342
            /* EXTBL */
1343
            gen_ext_l(ra, rb, rc, islit, lit, 0x01);
1344
            break;
1345
        case 0x0B:
1346
            /* INSBL */
1347
            gen_ins_l(ra, rb, rc, islit, lit, 0x01);
1348
            break;
1349
        case 0x12:
1350
            /* MSKWL */
1351
            gen_msk_l(ra, rb, rc, islit, lit, 0x03);
1352
            break;
1353
        case 0x16:
1354
            /* EXTWL */
1355
            gen_ext_l(ra, rb, rc, islit, lit, 0x03);
1356
            break;
1357
        case 0x1B:
1358
            /* INSWL */
1359
            gen_ins_l(ra, rb, rc, islit, lit, 0x03);
1360
            break;
1361
        case 0x22:
1362
            /* MSKLL */
1363
            gen_msk_l(ra, rb, rc, islit, lit, 0x0f);
1364
            break;
1365
        case 0x26:
1366
            /* EXTLL */
1367
            gen_ext_l(ra, rb, rc, islit, lit, 0x0f);
1368
            break;
1369
        case 0x2B:
1370
            /* INSLL */
1371
            gen_ins_l(ra, rb, rc, islit, lit, 0x0f);
1372
            break;
1373
        case 0x30:
1374
            /* ZAP */
1375
            gen_zap(ra, rb, rc, islit, lit);
1376
            break;
1377
        case 0x31:
1378
            /* ZAPNOT */
1379
            gen_zapnot(ra, rb, rc, islit, lit);
1380
            break;
1381
        case 0x32:
1382
            /* MSKQL */
1383
            gen_msk_l(ra, rb, rc, islit, lit, 0xff);
1384
            break;
1385
        case 0x34:
1386
            /* SRL */
1387
            if (likely(rc != 31)) {
1388
                if (ra != 31) {
1389
                    if (islit)
1390
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1391
                    else {
1392
                        TCGv shift = tcg_temp_new();
1393
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1394
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1395
                        tcg_temp_free(shift);
1396
                    }
1397
                } else
1398
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1399
            }
1400
            break;
1401
        case 0x36:
1402
            /* EXTQL */
1403
            gen_ext_l(ra, rb, rc, islit, lit, 0xff);
1404
            break;
1405
        case 0x39:
1406
            /* SLL */
1407
            if (likely(rc != 31)) {
1408
                if (ra != 31) {
1409
                    if (islit)
1410
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1411
                    else {
1412
                        TCGv shift = tcg_temp_new();
1413
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1414
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1415
                        tcg_temp_free(shift);
1416
                    }
1417
                } else
1418
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1419
            }
1420
            break;
1421
        case 0x3B:
1422
            /* INSQL */
1423
            gen_ins_l(ra, rb, rc, islit, lit, 0xff);
1424
            break;
1425
        case 0x3C:
1426
            /* SRA */
1427
            if (likely(rc != 31)) {
1428
                if (ra != 31) {
1429
                    if (islit)
1430
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1431
                    else {
1432
                        TCGv shift = tcg_temp_new();
1433
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1434
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1435
                        tcg_temp_free(shift);
1436
                    }
1437
                } else
1438
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1439
            }
1440
            break;
1441
        case 0x52:
1442
            /* MSKWH */
1443
            gen_mskwh(ra, rb, rc, islit, lit);
1444
            break;
1445
        case 0x57:
1446
            /* INSWH */
1447
            gen_inswh(ra, rb, rc, islit, lit);
1448
            break;
1449
        case 0x5A:
1450
            /* EXTWH */
1451
            gen_ext_h(ra, rb, rc, islit, lit, 0x03);
1452
            break;
1453
        case 0x62:
1454
            /* MSKLH */
1455
            gen_msklh(ra, rb, rc, islit, lit);
1456
            break;
1457
        case 0x67:
1458
            /* INSLH */
1459
            gen_inslh(ra, rb, rc, islit, lit);
1460
            break;
1461
        case 0x6A:
1462
            /* EXTLH */
1463
            gen_ext_h(ra, rb, rc, islit, lit, 0x0f);
1464
            break;
1465
        case 0x72:
1466
            /* MSKQH */
1467
            gen_mskqh(ra, rb, rc, islit, lit);
1468
            break;
1469
        case 0x77:
1470
            /* INSQH */
1471
            gen_insqh(ra, rb, rc, islit, lit);
1472
            break;
1473
        case 0x7A:
1474
            /* EXTQH */
1475
            gen_ext_h(ra, rb, rc, islit, lit, 0xff);
1476
            break;
1477
        default:
1478
            goto invalid_opc;
1479
        }
1480
        break;
1481
    case 0x13:
1482
        switch (fn7) {
1483
        case 0x00:
1484
            /* MULL */
1485
            if (likely(rc != 31)) {
1486
                if (ra == 31)
1487
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1488
                else {
1489
                    if (islit)
1490
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1491
                    else
1492
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1493
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1494
                }
1495
            }
1496
            break;
1497
        case 0x20:
1498
            /* MULQ */
1499
            if (likely(rc != 31)) {
1500
                if (ra == 31)
1501
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1502
                else if (islit)
1503
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1504
                else
1505
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1506
            }
1507
            break;
1508
        case 0x30:
1509
            /* UMULH */
1510
            gen_umulh(ra, rb, rc, islit, lit);
1511
            break;
1512
        case 0x40:
1513
            /* MULL/V */
1514
            gen_mullv(ra, rb, rc, islit, lit);
1515
            break;
1516
        case 0x60:
1517
            /* MULQ/V */
1518
            gen_mulqv(ra, rb, rc, islit, lit);
1519
            break;
1520
        default:
1521
            goto invalid_opc;
1522
        }
1523
        break;
1524
    case 0x14:
1525
        switch (fpfn) { /* f11 & 0x3F */
1526
        case 0x04:
1527
            /* ITOFS */
1528
            if (!(ctx->amask & AMASK_FIX))
1529
                goto invalid_opc;
1530
            if (likely(rc != 31)) {
1531
                if (ra != 31) {
1532
                    TCGv_i32 tmp = tcg_temp_new_i32();
1533
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1534
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1535
                    tcg_temp_free_i32(tmp);
1536
                } else
1537
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1538
            }
1539
            break;
1540
        case 0x0A:
1541
            /* SQRTF */
1542
            if (!(ctx->amask & AMASK_FIX))
1543
                goto invalid_opc;
1544
            gen_fsqrtf(rb, rc);
1545
            break;
1546
        case 0x0B:
1547
            /* SQRTS */
1548
            if (!(ctx->amask & AMASK_FIX))
1549
                goto invalid_opc;
1550
            gen_fsqrts(rb, rc);
1551
            break;
1552
        case 0x14:
1553
            /* ITOFF */
1554
            if (!(ctx->amask & AMASK_FIX))
1555
                goto invalid_opc;
1556
            if (likely(rc != 31)) {
1557
                if (ra != 31) {
1558
                    TCGv_i32 tmp = tcg_temp_new_i32();
1559
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1560
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1561
                    tcg_temp_free_i32(tmp);
1562
                } else
1563
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1564
            }
1565
            break;
1566
        case 0x24:
1567
            /* ITOFT */
1568
            if (!(ctx->amask & AMASK_FIX))
1569
                goto invalid_opc;
1570
            if (likely(rc != 31)) {
1571
                if (ra != 31)
1572
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1573
                else
1574
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1575
            }
1576
            break;
1577
        case 0x2A:
1578
            /* SQRTG */
1579
            if (!(ctx->amask & AMASK_FIX))
1580
                goto invalid_opc;
1581
            gen_fsqrtg(rb, rc);
1582
            break;
1583
        case 0x02B:
1584
            /* SQRTT */
1585
            if (!(ctx->amask & AMASK_FIX))
1586
                goto invalid_opc;
1587
            gen_fsqrtt(rb, rc);
1588
            break;
1589
        default:
1590
            goto invalid_opc;
1591
        }
1592
        break;
1593
    case 0x15:
1594
        /* VAX floating point */
1595
        /* XXX: rounding mode and trap are ignored (!) */
1596
        switch (fpfn) { /* f11 & 0x3F */
1597
        case 0x00:
1598
            /* ADDF */
1599
            gen_faddf(ra, rb, rc);
1600
            break;
1601
        case 0x01:
1602
            /* SUBF */
1603
            gen_fsubf(ra, rb, rc);
1604
            break;
1605
        case 0x02:
1606
            /* MULF */
1607
            gen_fmulf(ra, rb, rc);
1608
            break;
1609
        case 0x03:
1610
            /* DIVF */
1611
            gen_fdivf(ra, rb, rc);
1612
            break;
1613
        case 0x1E:
1614
            /* CVTDG */
1615
#if 0 // TODO
1616
            gen_fcvtdg(rb, rc);
1617
#else
1618
            goto invalid_opc;
1619
#endif
1620
            break;
1621
        case 0x20:
1622
            /* ADDG */
1623
            gen_faddg(ra, rb, rc);
1624
            break;
1625
        case 0x21:
1626
            /* SUBG */
1627
            gen_fsubg(ra, rb, rc);
1628
            break;
1629
        case 0x22:
1630
            /* MULG */
1631
            gen_fmulg(ra, rb, rc);
1632
            break;
1633
        case 0x23:
1634
            /* DIVG */
1635
            gen_fdivg(ra, rb, rc);
1636
            break;
1637
        case 0x25:
1638
            /* CMPGEQ */
1639
            gen_fcmpgeq(ra, rb, rc);
1640
            break;
1641
        case 0x26:
1642
            /* CMPGLT */
1643
            gen_fcmpglt(ra, rb, rc);
1644
            break;
1645
        case 0x27:
1646
            /* CMPGLE */
1647
            gen_fcmpgle(ra, rb, rc);
1648
            break;
1649
        case 0x2C:
1650
            /* CVTGF */
1651
            gen_fcvtgf(rb, rc);
1652
            break;
1653
        case 0x2D:
1654
            /* CVTGD */
1655
#if 0 // TODO
1656
            gen_fcvtgd(rb, rc);
1657
#else
1658
            goto invalid_opc;
1659
#endif
1660
            break;
1661
        case 0x2F:
1662
            /* CVTGQ */
1663
            gen_fcvtgq(rb, rc);
1664
            break;
1665
        case 0x3C:
1666
            /* CVTQF */
1667
            gen_fcvtqf(rb, rc);
1668
            break;
1669
        case 0x3E:
1670
            /* CVTQG */
1671
            gen_fcvtqg(rb, rc);
1672
            break;
1673
        default:
1674
            goto invalid_opc;
1675
        }
1676
        break;
1677
    case 0x16:
1678
        /* IEEE floating-point */
1679
        /* XXX: rounding mode and traps are ignored (!) */
1680
        switch (fpfn) { /* f11 & 0x3F */
1681
        case 0x00:
1682
            /* ADDS */
1683
            gen_fadds(ra, rb, rc);
1684
            break;
1685
        case 0x01:
1686
            /* SUBS */
1687
            gen_fsubs(ra, rb, rc);
1688
            break;
1689
        case 0x02:
1690
            /* MULS */
1691
            gen_fmuls(ra, rb, rc);
1692
            break;
1693
        case 0x03:
1694
            /* DIVS */
1695
            gen_fdivs(ra, rb, rc);
1696
            break;
1697
        case 0x20:
1698
            /* ADDT */
1699
            gen_faddt(ra, rb, rc);
1700
            break;
1701
        case 0x21:
1702
            /* SUBT */
1703
            gen_fsubt(ra, rb, rc);
1704
            break;
1705
        case 0x22:
1706
            /* MULT */
1707
            gen_fmult(ra, rb, rc);
1708
            break;
1709
        case 0x23:
1710
            /* DIVT */
1711
            gen_fdivt(ra, rb, rc);
1712
            break;
1713
        case 0x24:
1714
            /* CMPTUN */
1715
            gen_fcmptun(ra, rb, rc);
1716
            break;
1717
        case 0x25:
1718
            /* CMPTEQ */
1719
            gen_fcmpteq(ra, rb, rc);
1720
            break;
1721
        case 0x26:
1722
            /* CMPTLT */
1723
            gen_fcmptlt(ra, rb, rc);
1724
            break;
1725
        case 0x27:
1726
            /* CMPTLE */
1727
            gen_fcmptle(ra, rb, rc);
1728
            break;
1729
        case 0x2C:
1730
            /* XXX: incorrect */
1731
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
1732
                /* CVTST */
1733
                gen_fcvtst(rb, rc);
1734
            } else {
1735
                /* CVTTS */
1736
                gen_fcvtts(rb, rc);
1737
            }
1738
            break;
1739
        case 0x2F:
1740
            /* CVTTQ */
1741
            gen_fcvttq(rb, rc);
1742
            break;
1743
        case 0x3C:
1744
            /* CVTQS */
1745
            gen_fcvtqs(rb, rc);
1746
            break;
1747
        case 0x3E:
1748
            /* CVTQT */
1749
            gen_fcvtqt(rb, rc);
1750
            break;
1751
        default:
1752
            goto invalid_opc;
1753
        }
1754
        break;
1755
    case 0x17:
1756
        switch (fn11) {
1757
        case 0x010:
1758
            /* CVTLQ */
1759
            gen_fcvtlq(rb, rc);
1760
            break;
1761
        case 0x020:
1762
            if (likely(rc != 31)) {
1763
                if (ra == rb)
1764
                    /* FMOV */
1765
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1766
                else
1767
                    /* CPYS */
1768
                    gen_fcpys(ra, rb, rc);
1769
            }
1770
            break;
1771
        case 0x021:
1772
            /* CPYSN */
1773
            gen_fcpysn(ra, rb, rc);
1774
            break;
1775
        case 0x022:
1776
            /* CPYSE */
1777
            gen_fcpyse(ra, rb, rc);
1778
            break;
1779
        case 0x024:
1780
            /* MT_FPCR */
1781
            if (likely(ra != 31))
1782
                gen_helper_store_fpcr(cpu_fir[ra]);
1783
            else {
1784
                TCGv tmp = tcg_const_i64(0);
1785
                gen_helper_store_fpcr(tmp);
1786
                tcg_temp_free(tmp);
1787
            }
1788
            break;
1789
        case 0x025:
1790
            /* MF_FPCR */
1791
            if (likely(ra != 31))
1792
                gen_helper_load_fpcr(cpu_fir[ra]);
1793
            break;
1794
        case 0x02A:
1795
            /* FCMOVEQ */
1796
            gen_fcmpfeq(ra, rb, rc);
1797
            break;
1798
        case 0x02B:
1799
            /* FCMOVNE */
1800
            gen_fcmpfne(ra, rb, rc);
1801
            break;
1802
        case 0x02C:
1803
            /* FCMOVLT */
1804
            gen_fcmpflt(ra, rb, rc);
1805
            break;
1806
        case 0x02D:
1807
            /* FCMOVGE */
1808
            gen_fcmpfge(ra, rb, rc);
1809
            break;
1810
        case 0x02E:
1811
            /* FCMOVLE */
1812
            gen_fcmpfle(ra, rb, rc);
1813
            break;
1814
        case 0x02F:
1815
            /* FCMOVGT */
1816
            gen_fcmpfgt(ra, rb, rc);
1817
            break;
1818
        case 0x030:
1819
            /* CVTQL */
1820
            gen_fcvtql(rb, rc);
1821
            break;
1822
        case 0x130:
1823
            /* CVTQL/V */
1824
            gen_fcvtqlv(rb, rc);
1825
            break;
1826
        case 0x530:
1827
            /* CVTQL/SV */
1828
            gen_fcvtqlsv(rb, rc);
1829
            break;
1830
        default:
1831
            goto invalid_opc;
1832
        }
1833
        break;
1834
    case 0x18:
1835
        switch ((uint16_t)disp16) {
1836
        case 0x0000:
1837
            /* TRAPB */
1838
            /* No-op. Just exit from the current tb */
1839
            ret = 2;
1840
            break;
1841
        case 0x0400:
1842
            /* EXCB */
1843
            /* No-op. Just exit from the current tb */
1844
            ret = 2;
1845
            break;
1846
        case 0x4000:
1847
            /* MB */
1848
            /* No-op */
1849
            break;
1850
        case 0x4400:
1851
            /* WMB */
1852
            /* No-op */
1853
            break;
1854
        case 0x8000:
1855
            /* FETCH */
1856
            /* No-op */
1857
            break;
1858
        case 0xA000:
1859
            /* FETCH_M */
1860
            /* No-op */
1861
            break;
1862
        case 0xC000:
1863
            /* RPCC */
1864
            if (ra != 31)
1865
                gen_helper_load_pcc(cpu_ir[ra]);
1866
            break;
1867
        case 0xE000:
1868
            /* RC */
1869
            if (ra != 31)
1870
                gen_helper_rc(cpu_ir[ra]);
1871
            break;
1872
        case 0xE800:
1873
            /* ECB */
1874
            break;
1875
        case 0xF000:
1876
            /* RS */
1877
            if (ra != 31)
1878
                gen_helper_rs(cpu_ir[ra]);
1879
            break;
1880
        case 0xF800:
1881
            /* WH64 */
1882
            /* No-op */
1883
            break;
1884
        default:
1885
            goto invalid_opc;
1886
        }
1887
        break;
1888
    case 0x19:
1889
        /* HW_MFPR (PALcode) */
1890
#if defined (CONFIG_USER_ONLY)
1891
        goto invalid_opc;
1892
#else
1893
        if (!ctx->pal_mode)
1894
            goto invalid_opc;
1895
        if (ra != 31) {
1896
            TCGv tmp = tcg_const_i32(insn & 0xFF);
1897
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
1898
            tcg_temp_free(tmp);
1899
        }
1900
        break;
1901
#endif
1902
    case 0x1A:
1903
        if (rb != 31)
1904
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1905
        else
1906
            tcg_gen_movi_i64(cpu_pc, 0);
1907
        if (ra != 31)
1908
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1909
        /* Those four jumps only differ by the branch prediction hint */
1910
        switch (fn2) {
1911
        case 0x0:
1912
            /* JMP */
1913
            break;
1914
        case 0x1:
1915
            /* JSR */
1916
            break;
1917
        case 0x2:
1918
            /* RET */
1919
            break;
1920
        case 0x3:
1921
            /* JSR_COROUTINE */
1922
            break;
1923
        }
1924
        ret = 1;
1925
        break;
1926
    case 0x1B:
1927
        /* HW_LD (PALcode) */
1928
#if defined (CONFIG_USER_ONLY)
1929
        goto invalid_opc;
1930
#else
1931
        if (!ctx->pal_mode)
1932
            goto invalid_opc;
1933
        if (ra != 31) {
1934
            TCGv addr = tcg_temp_new();
1935
            if (rb != 31)
1936
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
1937
            else
1938
                tcg_gen_movi_i64(addr, disp12);
1939
            switch ((insn >> 12) & 0xF) {
1940
            case 0x0:
1941
                /* Longword physical access (hw_ldl/p) */
1942
                gen_helper_ldl_raw(cpu_ir[ra], addr);
1943
                break;
1944
            case 0x1:
1945
                /* Quadword physical access (hw_ldq/p) */
1946
                gen_helper_ldq_raw(cpu_ir[ra], addr);
1947
                break;
1948
            case 0x2:
1949
                /* Longword physical access with lock (hw_ldl_l/p) */
1950
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
1951
                break;
1952
            case 0x3:
1953
                /* Quadword physical access with lock (hw_ldq_l/p) */
1954
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
1955
                break;
1956
            case 0x4:
1957
                /* Longword virtual PTE fetch (hw_ldl/v) */
1958
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
1959
                break;
1960
            case 0x5:
1961
                /* Quadword virtual PTE fetch (hw_ldq/v) */
1962
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
1963
                break;
1964
            case 0x6:
1965
                /* Incpu_ir[ra]id */
1966
                goto invalid_opc;
1967
            case 0x7:
1968
                /* Incpu_ir[ra]id */
1969
                goto invalid_opc;
1970
            case 0x8:
1971
                /* Longword virtual access (hw_ldl) */
1972
                gen_helper_st_virt_to_phys(addr, addr);
1973
                gen_helper_ldl_raw(cpu_ir[ra], addr);
1974
                break;
1975
            case 0x9:
1976
                /* Quadword virtual access (hw_ldq) */
1977
                gen_helper_st_virt_to_phys(addr, addr);
1978
                gen_helper_ldq_raw(cpu_ir[ra], addr);
1979
                break;
1980
            case 0xA:
1981
                /* Longword virtual access with protection check (hw_ldl/w) */
1982
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
1983
                break;
1984
            case 0xB:
1985
                /* Quadword virtual access with protection check (hw_ldq/w) */
1986
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
1987
                break;
1988
            case 0xC:
1989
                /* Longword virtual access with alt access mode (hw_ldl/a)*/
1990
                gen_helper_set_alt_mode();
1991
                gen_helper_st_virt_to_phys(addr, addr);
1992
                gen_helper_ldl_raw(cpu_ir[ra], addr);
1993
                gen_helper_restore_mode();
1994
                break;
1995
            case 0xD:
1996
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
1997
                gen_helper_set_alt_mode();
1998
                gen_helper_st_virt_to_phys(addr, addr);
1999
                gen_helper_ldq_raw(cpu_ir[ra], addr);
2000
                gen_helper_restore_mode();
2001
                break;
2002
            case 0xE:
2003
                /* Longword virtual access with alternate access mode and
2004
                 * protection checks (hw_ldl/wa)
2005
                 */
2006
                gen_helper_set_alt_mode();
2007
                gen_helper_ldl_data(cpu_ir[ra], addr);
2008
                gen_helper_restore_mode();
2009
                break;
2010
            case 0xF:
2011
                /* Quadword virtual access with alternate access mode and
2012
                 * protection checks (hw_ldq/wa)
2013
                 */
2014
                gen_helper_set_alt_mode();
2015
                gen_helper_ldq_data(cpu_ir[ra], addr);
2016
                gen_helper_restore_mode();
2017
                break;
2018
            }
2019
            tcg_temp_free(addr);
2020
        }
2021
        break;
2022
#endif
2023
    case 0x1C:
2024
        switch (fn7) {
2025
        case 0x00:
2026
            /* SEXTB */
2027
            if (!(ctx->amask & AMASK_BWX))
2028
                goto invalid_opc;
2029
            if (likely(rc != 31)) {
2030
                if (islit)
2031
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
2032
                else
2033
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
2034
            }
2035
            break;
2036
        case 0x01:
2037
            /* SEXTW */
2038
            if (!(ctx->amask & AMASK_BWX))
2039
                goto invalid_opc;
2040
            if (likely(rc != 31)) {
2041
                if (islit)
2042
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
2043
                else
2044
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
2045
            }
2046
            break;
2047
        case 0x30:
2048
            /* CTPOP */
2049
            if (!(ctx->amask & AMASK_CIX))
2050
                goto invalid_opc;
2051
            if (likely(rc != 31)) {
2052
                if (islit)
2053
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
2054
                else
2055
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
2056
            }
2057
            break;
2058
        case 0x31:
2059
            /* PERR */
2060
            if (!(ctx->amask & AMASK_MVI))
2061
                goto invalid_opc;
2062
            gen_perr(ra, rb, rc, islit, lit);
2063
            break;
2064
        case 0x32:
2065
            /* CTLZ */
2066
            if (!(ctx->amask & AMASK_CIX))
2067
                goto invalid_opc;
2068
            if (likely(rc != 31)) {
2069
                if (islit)
2070
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
2071
                else
2072
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
2073
            }
2074
            break;
2075
        case 0x33:
2076
            /* CTTZ */
2077
            if (!(ctx->amask & AMASK_CIX))
2078
                goto invalid_opc;
2079
            if (likely(rc != 31)) {
2080
                if (islit)
2081
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
2082
                else
2083
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
2084
            }
2085
            break;
2086
        case 0x34:
2087
            /* UNPKBW */
2088
            if (!(ctx->amask & AMASK_MVI))
2089
                goto invalid_opc;
2090
            if (real_islit || ra != 31)
2091
                goto invalid_opc;
2092
            gen_unpkbw (rb, rc);
2093
            break;
2094
        case 0x35:
2095
            /* UNPKBL */
2096
            if (!(ctx->amask & AMASK_MVI))
2097
                goto invalid_opc;
2098
            if (real_islit || ra != 31)
2099
                goto invalid_opc;
2100
            gen_unpkbl (rb, rc);
2101
            break;
2102
        case 0x36:
2103
            /* PKWB */
2104
            if (!(ctx->amask & AMASK_MVI))
2105
                goto invalid_opc;
2106
            if (real_islit || ra != 31)
2107
                goto invalid_opc;
2108
            gen_pkwb (rb, rc);
2109
            break;
2110
        case 0x37:
2111
            /* PKLB */
2112
            if (!(ctx->amask & AMASK_MVI))
2113
                goto invalid_opc;
2114
            if (real_islit || ra != 31)
2115
                goto invalid_opc;
2116
            gen_pklb (rb, rc);
2117
            break;
2118
        case 0x38:
2119
            /* MINSB8 */
2120
            if (!(ctx->amask & AMASK_MVI))
2121
                goto invalid_opc;
2122
            gen_minsb8 (ra, rb, rc, islit, lit);
2123
            break;
2124
        case 0x39:
2125
            /* MINSW4 */
2126
            if (!(ctx->amask & AMASK_MVI))
2127
                goto invalid_opc;
2128
            gen_minsw4 (ra, rb, rc, islit, lit);
2129
            break;
2130
        case 0x3A:
2131
            /* MINUB8 */
2132
            if (!(ctx->amask & AMASK_MVI))
2133
                goto invalid_opc;
2134
            gen_minub8 (ra, rb, rc, islit, lit);
2135
            break;
2136
        case 0x3B:
2137
            /* MINUW4 */
2138
            if (!(ctx->amask & AMASK_MVI))
2139
                goto invalid_opc;
2140
            gen_minuw4 (ra, rb, rc, islit, lit);
2141
            break;
2142
        case 0x3C:
2143
            /* MAXUB8 */
2144
            if (!(ctx->amask & AMASK_MVI))
2145
                goto invalid_opc;
2146
            gen_maxub8 (ra, rb, rc, islit, lit);
2147
            break;
2148
        case 0x3D:
2149
            /* MAXUW4 */
2150
            if (!(ctx->amask & AMASK_MVI))
2151
                goto invalid_opc;
2152
            gen_maxuw4 (ra, rb, rc, islit, lit);
2153
            break;
2154
        case 0x3E:
2155
            /* MAXSB8 */
2156
            if (!(ctx->amask & AMASK_MVI))
2157
                goto invalid_opc;
2158
            gen_maxsb8 (ra, rb, rc, islit, lit);
2159
            break;
2160
        case 0x3F:
2161
            /* MAXSW4 */
2162
            if (!(ctx->amask & AMASK_MVI))
2163
                goto invalid_opc;
2164
            gen_maxsw4 (ra, rb, rc, islit, lit);
2165
            break;
2166
        case 0x70:
2167
            /* FTOIT */
2168
            if (!(ctx->amask & AMASK_FIX))
2169
                goto invalid_opc;
2170
            if (likely(rc != 31)) {
2171
                if (ra != 31)
2172
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
2173
                else
2174
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2175
            }
2176
            break;
2177
        case 0x78:
2178
            /* FTOIS */
2179
            if (!(ctx->amask & AMASK_FIX))
2180
                goto invalid_opc;
2181
            if (rc != 31) {
2182
                TCGv_i32 tmp1 = tcg_temp_new_i32();
2183
                if (ra != 31)
2184
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
2185
                else {
2186
                    TCGv tmp2 = tcg_const_i64(0);
2187
                    gen_helper_s_to_memory(tmp1, tmp2);
2188
                    tcg_temp_free(tmp2);
2189
                }
2190
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
2191
                tcg_temp_free_i32(tmp1);
2192
            }
2193
            break;
2194
        default:
2195
            goto invalid_opc;
2196
        }
2197
        break;
2198
    case 0x1D:
2199
        /* HW_MTPR (PALcode) */
2200
#if defined (CONFIG_USER_ONLY)
2201
        goto invalid_opc;
2202
#else
2203
        if (!ctx->pal_mode)
2204
            goto invalid_opc;
2205
        else {
2206
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
2207
            if (ra != 31)
2208
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
2209
            else {
2210
                TCGv tmp2 = tcg_const_i64(0);
2211
                gen_helper_mtpr(tmp1, tmp2);
2212
                tcg_temp_free(tmp2);
2213
            }
2214
            tcg_temp_free(tmp1);
2215
            ret = 2;
2216
        }
2217
        break;
2218
#endif
2219
    case 0x1E:
2220
        /* HW_REI (PALcode) */
2221
#if defined (CONFIG_USER_ONLY)
2222
        goto invalid_opc;
2223
#else
2224
        if (!ctx->pal_mode)
2225
            goto invalid_opc;
2226
        if (rb == 31) {
2227
            /* "Old" alpha */
2228
            gen_helper_hw_rei();
2229
        } else {
2230
            TCGv tmp;
2231

    
2232
            if (ra != 31) {
2233
                tmp = tcg_temp_new();
2234
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
2235
            } else
2236
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
2237
            gen_helper_hw_ret(tmp);
2238
            tcg_temp_free(tmp);
2239
        }
2240
        ret = 2;
2241
        break;
2242
#endif
2243
    case 0x1F:
2244
        /* HW_ST (PALcode) */
2245
#if defined (CONFIG_USER_ONLY)
2246
        goto invalid_opc;
2247
#else
2248
        if (!ctx->pal_mode)
2249
            goto invalid_opc;
2250
        else {
2251
            TCGv addr, val;
2252
            addr = tcg_temp_new();
2253
            if (rb != 31)
2254
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2255
            else
2256
                tcg_gen_movi_i64(addr, disp12);
2257
            if (ra != 31)
2258
                val = cpu_ir[ra];
2259
            else {
2260
                val = tcg_temp_new();
2261
                tcg_gen_movi_i64(val, 0);
2262
            }
2263
            switch ((insn >> 12) & 0xF) {
2264
            case 0x0:
2265
                /* Longword physical access */
2266
                gen_helper_stl_raw(val, addr);
2267
                break;
2268
            case 0x1:
2269
                /* Quadword physical access */
2270
                gen_helper_stq_raw(val, addr);
2271
                break;
2272
            case 0x2:
2273
                /* Longword physical access with lock */
2274
                gen_helper_stl_c_raw(val, val, addr);
2275
                break;
2276
            case 0x3:
2277
                /* Quadword physical access with lock */
2278
                gen_helper_stq_c_raw(val, val, addr);
2279
                break;
2280
            case 0x4:
2281
                /* Longword virtual access */
2282
                gen_helper_st_virt_to_phys(addr, addr);
2283
                gen_helper_stl_raw(val, addr);
2284
                break;
2285
            case 0x5:
2286
                /* Quadword virtual access */
2287
                gen_helper_st_virt_to_phys(addr, addr);
2288
                gen_helper_stq_raw(val, addr);
2289
                break;
2290
            case 0x6:
2291
                /* Invalid */
2292
                goto invalid_opc;
2293
            case 0x7:
2294
                /* Invalid */
2295
                goto invalid_opc;
2296
            case 0x8:
2297
                /* Invalid */
2298
                goto invalid_opc;
2299
            case 0x9:
2300
                /* Invalid */
2301
                goto invalid_opc;
2302
            case 0xA:
2303
                /* Invalid */
2304
                goto invalid_opc;
2305
            case 0xB:
2306
                /* Invalid */
2307
                goto invalid_opc;
2308
            case 0xC:
2309
                /* Longword virtual access with alternate access mode */
2310
                gen_helper_set_alt_mode();
2311
                gen_helper_st_virt_to_phys(addr, addr);
2312
                gen_helper_stl_raw(val, addr);
2313
                gen_helper_restore_mode();
2314
                break;
2315
            case 0xD:
2316
                /* Quadword virtual access with alternate access mode */
2317
                gen_helper_set_alt_mode();
2318
                gen_helper_st_virt_to_phys(addr, addr);
2319
                gen_helper_stl_raw(val, addr);
2320
                gen_helper_restore_mode();
2321
                break;
2322
            case 0xE:
2323
                /* Invalid */
2324
                goto invalid_opc;
2325
            case 0xF:
2326
                /* Invalid */
2327
                goto invalid_opc;
2328
            }
2329
            if (ra == 31)
2330
                tcg_temp_free(val);
2331
            tcg_temp_free(addr);
2332
        }
2333
        break;
2334
#endif
2335
    case 0x20:
2336
        /* LDF */
2337
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2338
        break;
2339
    case 0x21:
2340
        /* LDG */
2341
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2342
        break;
2343
    case 0x22:
2344
        /* LDS */
2345
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2346
        break;
2347
    case 0x23:
2348
        /* LDT */
2349
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2350
        break;
2351
    case 0x24:
2352
        /* STF */
2353
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
2354
        break;
2355
    case 0x25:
2356
        /* STG */
2357
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
2358
        break;
2359
    case 0x26:
2360
        /* STS */
2361
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
2362
        break;
2363
    case 0x27:
2364
        /* STT */
2365
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
2366
        break;
2367
    case 0x28:
2368
        /* LDL */
2369
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2370
        break;
2371
    case 0x29:
2372
        /* LDQ */
2373
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2374
        break;
2375
    case 0x2A:
2376
        /* LDL_L */
2377
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2378
        break;
2379
    case 0x2B:
2380
        /* LDQ_L */
2381
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2382
        break;
2383
    case 0x2C:
2384
        /* STL */
2385
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
2386
        break;
2387
    case 0x2D:
2388
        /* STQ */
2389
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
2390
        break;
2391
    case 0x2E:
2392
        /* STL_C */
2393
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
2394
        break;
2395
    case 0x2F:
2396
        /* STQ_C */
2397
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
2398
        break;
2399
    case 0x30:
2400
        /* BR */
2401
        if (ra != 31)
2402
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2403
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2404
        ret = 1;
2405
        break;
2406
    case 0x31: /* FBEQ */
2407
    case 0x32: /* FBLT */
2408
    case 0x33: /* FBLE */
2409
        gen_fbcond(ctx, opc, ra, disp21);
2410
        ret = 1;
2411
        break;
2412
    case 0x34:
2413
        /* BSR */
2414
        if (ra != 31)
2415
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2416
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2417
        ret = 1;
2418
        break;
2419
    case 0x35: /* FBNE */
2420
    case 0x36: /* FBGE */
2421
    case 0x37: /* FBGT */
2422
        gen_fbcond(ctx, opc, ra, disp21);
2423
        ret = 1;
2424
        break;
2425
    case 0x38:
2426
        /* BLBC */
2427
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2428
        ret = 1;
2429
        break;
2430
    case 0x39:
2431
        /* BEQ */
2432
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2433
        ret = 1;
2434
        break;
2435
    case 0x3A:
2436
        /* BLT */
2437
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2438
        ret = 1;
2439
        break;
2440
    case 0x3B:
2441
        /* BLE */
2442
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2443
        ret = 1;
2444
        break;
2445
    case 0x3C:
2446
        /* BLBS */
2447
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2448
        ret = 1;
2449
        break;
2450
    case 0x3D:
2451
        /* BNE */
2452
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2453
        ret = 1;
2454
        break;
2455
    case 0x3E:
2456
        /* BGE */
2457
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2458
        ret = 1;
2459
        break;
2460
    case 0x3F:
2461
        /* BGT */
2462
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2463
        ret = 1;
2464
        break;
2465
    invalid_opc:
2466
        gen_invalid(ctx);
2467
        ret = 3;
2468
        break;
2469
    }
2470

    
2471
    return ret;
2472
}
2473

    
2474
/* Translate a block of guest Alpha code starting at tb->pc into TCG ops.
 *
 * env       - CPU state the code is translated for (amask, PS, PALmode).
 * tb        - translation block being filled in.
 * search_pc - when non-zero, also record the guest-PC <-> TCG-op mapping
 *             (gen_opc_pc / gen_opc_instr_start / gen_opc_icount) so a
 *             host PC can later be mapped back to a guest PC.
 *
 * Translation stops when translate_one() reports a non-zero status
 * (branch taken, TB-exit required, or invalid opcode), on a breakpoint,
 * on single-stepping, at a guest page boundary, when the TCG opcode
 * buffer is nearly full, or when the icount budget is exhausted.
 */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;       /* lj: last gen_opc_* slot written (search_pc) */
    int ret;              /* status from translate_one(); 0 = keep going */
    int num_insns;
    int max_insns;        /* icount budget for this TB */

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    /* Current mode comes from PS<4:3>; PALmode from EXC_ADDR bit 0.  */
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        /* Emit a debug exception when a breakpoint sits on this insn.  */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record guest PC / icount for the TCG ops about to be
               emitted; pad any skipped op slots with 0.  */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        /* Last insn of an I/O TB must run with io access enabled.  */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        insn = ldl_code(ctx.pc);
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    /* ret==1 (branch already updated cpu_pc) and ret==3 (invalid opcode,
       exception raised) leave the PC alone; otherwise store the next PC.  */
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the mapping slots for any trailing epilogue ops.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
2581

    
2582
/* Translate a TB for execution; no PC-mapping information is recorded.  */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
2586

    
2587
/* Re-translate a TB while recording the guest-PC <-> op mapping, used to
   restore CPU state after a fault inside generated code.  */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
2591

    
2592
/* One selectable Alpha CPU model: its -cpu name, the IMPLVER value it
   reports, and the AMASK architecture-extension bits it implements.  */
struct cpu_def_t {
    const char *name;
    int implver, amask;
};
2596

    
2597
static const struct cpu_def_t cpu_defs[] = {
2598
    { "ev4",   IMPLVER_2106x, 0 },
2599
    { "ev5",   IMPLVER_21164, 0 },
2600
    { "ev56",  IMPLVER_21164, AMASK_BWX },
2601
    { "pca56", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
2602
    { "ev6",   IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
2603
    { "ev67",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
2604
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
2605
    { "ev68",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
2606
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
2607
    { "21064", IMPLVER_2106x, 0 },
2608
    { "21164", IMPLVER_21164, 0 },
2609
    { "21164a", IMPLVER_21164, AMASK_BWX },
2610
    { "21164pc", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
2611
    { "21264", IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
2612
    { "21264a", IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
2613
                                | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), }
2614
};
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2617
{
2618
    CPUAlphaState *env;
2619
    uint64_t hwpcb;
2620
    int implver, amask, i, max;
2621

    
2622
    env = qemu_mallocz(sizeof(CPUAlphaState));
2623
    cpu_exec_init(env);
2624
    alpha_translate_init();
2625
    tlb_flush(env, 1);
2626

    
2627
    /* Default to ev67; no reason not to emulate insns by default.  */
2628
    implver = IMPLVER_21264;
2629
    amask = (AMASK_BWX | AMASK_FIX | AMASK_CIX | AMASK_MVI
2630
             | AMASK_TRAP | AMASK_PREFETCH);
2631

    
2632
    max = ARRAY_SIZE(cpu_defs);
2633
    for (i = 0; i < max; i++) {
2634
        if (strcmp (cpu_model, cpu_defs[i].name) == 0) {
2635
            implver = cpu_defs[i].implver;
2636
            amask = cpu_defs[i].amask;
2637
            break;
2638
        }
2639
    }
2640
    env->implver = implver;
2641
    env->amask = amask;
2642

    
2643
    env->ps = 0x1F00;
2644
#if defined (CONFIG_USER_ONLY)
2645
    env->ps |= 1 << 3;
2646
#endif
2647
    pal_init(env);
2648
    /* Initialize IPR */
2649
    hwpcb = env->ipr[IPR_PCBB];
2650
    env->ipr[IPR_ASN] = 0;
2651
    env->ipr[IPR_ASTEN] = 0;
2652
    env->ipr[IPR_ASTSR] = 0;
2653
    env->ipr[IPR_DATFX] = 0;
2654
    /* XXX: fix this */
2655
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2656
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2657
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2658
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2659
    env->ipr[IPR_FEN] = 0;
2660
    env->ipr[IPR_IPL] = 31;
2661
    env->ipr[IPR_MCES] = 0;
2662
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2663
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2664
    env->ipr[IPR_SISR] = 0;
2665
    env->ipr[IPR_VIRBND] = -1ULL;
2666

    
2667
    qemu_init_vcpu(env);
2668
    return env;
2669
}
/* Recover the guest PC for the instruction at index pc_pos in the
   gen_opc_pc[] table and store it back into the CPU state.  tb,
   searched_pc and puc are unused on Alpha.  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}