/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

#undef ALPHA_DEBUG_DISAS

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;
#ifdef CONFIG_USER_ONLY
static TCGv cpu_uniq;
#endif

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
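/* Sizing note: "ir0".."ir9" need 4 bytes each (including the NUL) and
   "ir10".."ir30" need 5; the FP names are one byte longer, which gives the
   10*4 + 21*5 + 10*5 + 21*6 total used above.  */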

#include "gen-icount.h"

static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

#ifdef CONFIG_USER_ONLY
    cpu_uniq = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, unique), "uniq");
#endif

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}

static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}

static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

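/* Load-locked/store-conditional are modelled with a single lock address:
   the ldx_l helpers below record the locked address in cpu_lock, and the
   stx_c helpers further down only perform the store (returning 1) when
   cpu_lock still matches the store address, returning 0 otherwise and
   invalidating the lock in either case.  */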
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}

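/* Common load path: form the effective address rb + disp16 (with the low
   three bits cleared when 'clear' is set, as for the unaligned LDQ_U form)
   and load into an integer or FP register.  Loads into R31/F31 are simply
   skipped, since those registers always read as zero.  */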
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}

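/* Conditional branches: the not-taken path reloads cpu_pc with ctx->pc and
   the taken path with ctx->pc + 4 * disp; the Alpha branch displacement is
   counted in longwords, hence the disp << 2 below.  */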
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}

static inline void gen_fbcond(DisasContext *ctx, int opc, int ra, int32_t disp)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else  {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}

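/* CMOVxx: the caller passes the *inverted* condition (inv_cond) so that we
   can branch around the move when the opposite condition holds; with mask
   set, only bit 0 of ra is tested (CMOVLBS/CMOVLBC).  */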
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}

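/* The FARITH2/FARITH3 macros below generate one gen_fXXX() wrapper per
   floating-point helper: e.g. FARITH3(addt) defines gen_faddt(ra, rb, rc),
   which calls gen_helper_addt and substitutes a zero constant whenever F31
   is used as a source.  Results written to F31 are discarded.  */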
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
      return;                                               \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)

#define FARITH3(name)                                                     \
static inline void glue(gen_f, name)(int ra, int rb, int rc)              \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                  \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}

FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)

#define FCMOV(name)                                                   \
static inline void glue(gen_f, name)(int ra, int rb, int rc)          \
{                                                                     \
    int l1;                                                           \
    TCGv tmp;                                                         \
                                                                      \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    l1 = gen_new_label();                                             \
    if (ra != 31) {                                                   \
        tmp = tcg_temp_new();                                         \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
    } else  {                                                         \
        tmp = tcg_const_i64(0);                                       \
        gen_helper_ ## name (tmp, tmp);                               \
    }                                                                 \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
    /* FCMOVxx copies Fb into Fc when the test of Fa succeeds.  */    \
    if (rb != 31)                                                     \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                    \
    else                                                              \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
    gen_set_label(l1);                                                \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)

/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
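/* For example, a byte mask of 0x0f keeps bytes 0-3 and so reduces to a
   32-bit zero-extension, while 0x01 and 0x03 reduce to 8- and 16-bit
   zero-extensions; any other mask becomes an AND with the expanded
   64-bit byte mask.  */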
static inline void gen_zapnoti(int ra, int rc, uint8_t lit)
{
    uint64_t mask;
    int i;

    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(cpu_ir[rc], 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(cpu_ir[rc], cpu_ir[ra]);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(cpu_ir[rc], cpu_ir[ra]);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(cpu_ir[rc], cpu_ir[ra]);
        break;
    case 0xff:
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        break;
    default:
        for (mask = i = 0; i < 8; ++i) {
            if ((lit >> i) & 1)
                mask |= 0xffull << (i * 8);
        }
        tcg_gen_andi_i64 (cpu_ir[rc], cpu_ir[ra], mask);
        break;
    }
}

static inline void gen_zapnot(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit)
        gen_zapnoti(ra, rc, lit);
    else
        gen_helper_zapnot (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
}

static inline void gen_zap(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit)
        gen_zapnoti(ra, rc, ~lit);
    else
        gen_helper_zap (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
}

/* EXTWH, EXTLH, EXTQH */
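/* The xxxH forms shift the source left by (64 - 8 * (shift & 7)) & 63 bits
   (so a zero byte offset leaves the value unchanged), while the xxxL forms
   below shift right by 8 * (shift & 7); the result is then trimmed to the
   requested width with gen_zapnoti.  */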
static inline void gen_ext_h(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            lit = (64 - (lit & 7) * 8) & 0x3f;
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        } else {
            TCGv tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        gen_zapnoti(rc, rc, byte_mask);
    }
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static inline void gen_ext_l(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        gen_zapnoti(rc, rc, byte_mask);
    }
}

/* Code to call arith3 helpers */
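/* Each ARITH3 instance defines gen_<name>(ra, rb, rc, islit, lit), which
   forwards to gen_helper_<name>, using the 8-bit literal or a zero constant
   in place of R31 as appropriate.  */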
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
ARITH3(minub8)
ARITH3(minsb8)
ARITH3(minuw4)
ARITH3(minsw4)
ARITH3(maxub8)
ARITH3(maxsb8)
ARITH3(maxuw4)
ARITH3(maxsw4)
ARITH3(perr)

#define MVIOP2(name)                                    \
static inline void glue(gen_, name)(int rb, int rc)     \
{                                                       \
    if (unlikely(rc == 31))                             \
        return;                                         \
    if (unlikely(rb == 31))                             \
        tcg_gen_movi_i64(cpu_ir[rc], 0);                \
    else                                                \
        gen_helper_ ## name (cpu_ir[rc], cpu_ir[rb]);   \
}
MVIOP2(pklb)
MVIOP2(pkwb)
MVIOP2(unpkbl)
MVIOP2(unpkbw)

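/* Integer compares (CMPEQ, CMPLT, CMPULE, ...): produce 0 or 1 in rc by
   branching on the condition rather than using a setcond-style op.  */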
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
                           uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}

static inline int translate_one(DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit, real_islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
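    /* Alpha instructions are 32 bits wide: the opcode sits in [31:26],
       ra in [25:21] and rb in [20:16]; the remaining bits hold either a
       16-bit displacement, a 21-bit branch displacement, a 26-bit PALcode
       number, or an operate-format function code, with bit 12 selecting
       the 8-bit literal in [20:13] over rb.  */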
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    real_islit = islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
    LOG_DISAS("opc %02x ra %2d rb %2d rc %2d disp16 %6d\n",
              opc, ra, rb, rc, disp16);

    switch (opc) {
    case 0x00:
        /* CALL_PAL */
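        /* In user mode, RDUNIQUE/WRUNIQUE are emulated inline via cpu_uniq;
           other PAL calls are converted into exceptions whose vector is
           derived from the PALcode number: EXCP_CALL_PAL for the
           unprivileged 0x80-0xBF range, EXCP_CALL_PALP for entries below
           0x40 (rejected when the mem_idx check indicates user mode).  */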
#ifdef CONFIG_USER_ONLY
        if (palcode == 0x9E) {
            /* RDUNIQUE */
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_uniq);
            break;
        } else if (palcode == 0x9F) {
            /* WRUNIQUE */
            tcg_gen_mov_i64(cpu_uniq, cpu_ir[IR_A0]);
            break;
        }
#endif
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
            ret = 3;
            break;
        }
#ifndef CONFIG_USER_ONLY
        if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
            ret = 3;
        }
#endif
        /* Invalid PAL call */
        goto invalid_opc;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
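    /* Opcode 0x10: integer arithmetic.  The longword (32-bit) forms compute
       in 64 bits and then sign-extend the low 32 bits of the result; reads
       of R31 are treated as zero and results destined for R31 are simply
       dropped.  */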
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
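            /* AMASK returns its operand with the bits for the architecture
               extensions implemented by this CPU cleared; ctx->amask holds
               those feature bits, and the EV4/EV5 family (IMPLVER_2106x)
               returns the operand unchanged.  */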
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                case IMPLVER_21164:
                case IMPLVER_21264:
                case IMPLVER_21364:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ra, rb, rc, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ra, rb, rc, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ra, rb, rc, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
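    /* Opcodes 0x14-0x16: floating point.  fpfn (fn11 & 0x3F) selects the
       operation; the remaining fn11 bits encode rounding and trapping
       qualifiers, which this translator largely ignores (see the XXX notes
       below).  The 0x14 group additionally requires the FIX extension.  */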
    case 0x14:
1472
        switch (fpfn) { /* f11 & 0x3F */
1473
        case 0x04:
1474
            /* ITOFS */
1475
            if (!(ctx->amask & AMASK_FIX))
1476
                goto invalid_opc;
1477
            if (likely(rc != 31)) {
1478
                if (ra != 31) {
1479
                    TCGv_i32 tmp = tcg_temp_new_i32();
1480
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1481
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1482
                    tcg_temp_free_i32(tmp);
1483
                } else
1484
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1485
            }
1486
            break;
1487
        case 0x0A:
1488
            /* SQRTF */
1489
            if (!(ctx->amask & AMASK_FIX))
1490
                goto invalid_opc;
1491
            gen_fsqrtf(rb, rc);
1492
            break;
1493
        case 0x0B:
1494
            /* SQRTS */
1495
            if (!(ctx->amask & AMASK_FIX))
1496
                goto invalid_opc;
1497
            gen_fsqrts(rb, rc);
1498
            break;
1499
        case 0x14:
1500
            /* ITOFF */
1501
            if (!(ctx->amask & AMASK_FIX))
1502
                goto invalid_opc;
1503
            if (likely(rc != 31)) {
1504
                if (ra != 31) {
1505
                    TCGv_i32 tmp = tcg_temp_new_i32();
1506
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1507
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1508
                    tcg_temp_free_i32(tmp);
1509
                } else
1510
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1511
            }
1512
            break;
1513
        case 0x24:
1514
            /* ITOFT */
1515
            if (!(ctx->amask & AMASK_FIX))
1516
                goto invalid_opc;
1517
            if (likely(rc != 31)) {
1518
                if (ra != 31)
1519
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1520
                else
1521
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1522
            }
1523
            break;
1524
        case 0x2A:
1525
            /* SQRTG */
1526
            if (!(ctx->amask & AMASK_FIX))
1527
                goto invalid_opc;
1528
            gen_fsqrtg(rb, rc);
1529
            break;
1530
        case 0x02B:
1531
            /* SQRTT */
1532
            if (!(ctx->amask & AMASK_FIX))
1533
                goto invalid_opc;
1534
            gen_fsqrtt(rb, rc);
1535
            break;
1536
        default:
1537
            goto invalid_opc;
1538
        }
1539
        break;
1540
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
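            /* The access-type field at insn<15:12> selects between the
               physical, locked, PTE, virtual and alternate-mode load
               variants handled below.  */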
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_perr(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_unpkbw (rb, rc);
            break;
        case 0x35:
            /* UNPKBL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_unpkbl (rb, rc);
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_pkwb (rb, rc);
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_pklb (rb, rc);
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minsb8 (ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minsw4 (ra, rb, rc, islit, lit);
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minub8 (ra, rb, rc, islit, lit);
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minuw4 (ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxub8 (ra, rb, rc, islit, lit);
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxuw4 (ra, rb, rc, islit, lit);
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxsb8 (ra, rb, rc, islit, lit);
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxsw4 (ra, rb, rc, islit, lit);
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

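            /* hw_ret form: the low 13 bits of the instruction word hold the
               displacement; the shift pair below sign-extends them into a
               64-bit offset.  */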
            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
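    /* Memory-format opcodes.  The trailing flags passed to gen_load_mem and
       gen_store_mem (defined earlier in this file) appear to select the FP
       register file, low-address-bit clearing, and, for stores, the
       store-conditional handling.  */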
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

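    /* Return codes used above: 0 = continue with the next insn, 1 = branch
       generated (cpu_pc already written), 2 = stop the TB and resynchronize
       cpu_pc, 3 = invalid opcode (exception already raised).  */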
    return ret;
}

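/* Translate a block of guest code: decode insns one at a time with
   translate_one() until it requests a stop (ret != 0), or until a page
   boundary, the opcode buffer limit, the icount budget or single-stepping
   ends the block.  With search_pc set, per-insn PC/icount information is
   also recorded so a host PC can be mapped back to a guest PC.  */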
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        insn = ldl_code(ctx.pc);
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

struct cpu_def_t {
    const char *name;
    int implver, amask;
};

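/* Known CPU models: implver selects the reported implementation version,
   amask the advertised instruction-set extensions (BWX, FIX, CIX, MVI, ...).  */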
static const struct cpu_def_t cpu_defs[] = {
    { "ev4",   IMPLVER_2106x, 0 },
    { "ev5",   IMPLVER_21164, 0 },
    { "ev56",  IMPLVER_21164, AMASK_BWX },
    { "pca56", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "ev6",   IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "ev67",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "ev68",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "21064", IMPLVER_2106x, 0 },
    { "21164", IMPLVER_21164, 0 },
    { "21164a", IMPLVER_21164, AMASK_BWX },
    { "21164pc", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "21264", IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "21264a", IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                                | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), }
};

CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;
    int implver, amask, i, max;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);

    /* Default to ev67; no reason not to emulate insns by default.  */
    implver = IMPLVER_21264;
    amask = (AMASK_BWX | AMASK_FIX | AMASK_CIX | AMASK_MVI
             | AMASK_TRAP | AMASK_PREFETCH);

    max = ARRAY_SIZE(cpu_defs);
    for (i = 0; i < max; i++) {
        if (strcmp (cpu_model, cpu_defs[i].name) == 0) {
            implver = cpu_defs[i].implver;
            amask = cpu_defs[i].amask;
            break;
        }
    }
    env->implver = implver;
    env->amask = amask;

    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}

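/* Restore the guest PC recorded by the search_pc pass for the instruction
   at gen_opc position pc_pos.  */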
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}