Statistics
| Branch: | Revision:

root / target-alpha / translate.c @ 248c42f3

History | View | Annotate | Download (79.2 kB)

1
/*
2
 *  Alpha emulation cpu translation for qemu.
3
 *
4
 *  Copyright (c) 2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18
 */
19

    
20
#include <stdint.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23

    
24
#include "cpu.h"
25
#include "exec-all.h"
26
#include "disas.h"
27
#include "host-utils.h"
28
#include "tcg-op.h"
29
#include "qemu-common.h"
30

    
31
#include "helper.h"
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
#undef ALPHA_DEBUG_DISAS

/* LOG_DISAS compiles away entirely unless ALPHA_DEBUG_DISAS is defined
   above; when enabled it routes through the CPU_LOG_TB_IN_ASM log mask.  */
#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
42

    
43
typedef struct DisasContext DisasContext;
/* Per-translation-block disassembly state threaded through all the
   code generators below.  */
struct DisasContext {
    uint64_t pc;                /* virtual PC of the insn being translated */
    int mem_idx;                /* MMU index used for guest memory accesses */
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;               /* non-zero while translating PALcode */
#endif
    CPUAlphaState *env;
    uint32_t amask;             /* AMASK bits: which ISA extensions exist */
};
53

    
54
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];         /* integer registers; $31 is handled as zero */
static TCGv cpu_fir[31];        /* FP registers; $f31 is handled as zero */
static TCGv cpu_pc;
static TCGv cpu_lock;           /* LDx_L locked address; -1 means no lock */
#ifdef CONFIG_USER_ONLY
static TCGv cpu_uniq;           /* process-unique value (RDUNIQUE/WRUNIQUE) */
#endif

/* register names */
/* Backing store for the NUL-terminated names "ir0".."ir30" and
   "fir0".."fir30": 10*4+21*5 bytes for ir, 10*5+21*6 for fir.  */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
66

    
67
#include "gen-icount.h"
68

    
69
/* One-time creation of the TCG globals that mirror the Alpha CPU state.
   Safe to call repeatedly; only the first call does any work.  */
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;  /* skip past "irN\0" / "irNN\0" */

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;  /* skip past "firN\0" / "firNN\0" */
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

#ifdef CONFIG_USER_ONLY
    cpu_uniq = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, unique), "uniq");
#endif

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
110

    
111
/* Raise EXCEPTION with ERROR_CODE.  The current PC is written back to
   cpu_pc first so the exception handler sees where we stopped.  */
static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 exc, err;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    exc = tcg_const_i32(exception);
    err = tcg_const_i32(error_code);
    gen_helper_excp(exc, err);
    tcg_temp_free_i32(err);
    tcg_temp_free_i32(exc);
}
122

    
123
/* Raise an illegal-instruction (OPCDEC) exception.  */
static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
127

    
128
/* Load a VAX F-format float: fetch 32 bits from *t1 and let the helper
   expand them into the 64-bit register representation in t0.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv raw64 = tcg_temp_new();
    TCGv_i32 raw32 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32u(raw64, t1, flags);
    tcg_gen_trunc_i64_i32(raw32, raw64);
    gen_helper_memory_to_f(t0, raw32);

    tcg_temp_free_i32(raw32);
    tcg_temp_free(raw64);
}
138

    
139
/* Load a VAX G-format float: fetch 64 bits from *t1 and convert to the
   register representation via the helper.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv raw = tcg_temp_new();

    tcg_gen_qemu_ld64(raw, t1, flags);
    gen_helper_memory_to_g(t0, raw);

    tcg_temp_free(raw);
}
146

    
147
/* Load an IEEE S-format float: fetch 32 bits from *t1 and let the helper
   expand them into the 64-bit register representation in t0.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv raw64 = tcg_temp_new();
    TCGv_i32 raw32 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32u(raw64, t1, flags);
    tcg_gen_trunc_i64_i32(raw32, raw64);
    gen_helper_memory_to_s(t0, raw32);

    tcg_temp_free_i32(raw32);
    tcg_temp_free(raw64);
}
157

    
158
/* LDL_L: load-locked longword.  Record the locked address in cpu_lock so
   a later STx_C can test it, then do a sign-extending 32-bit load.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}
163

    
164
/* LDQ_L: load-locked quadword.  Record the locked address in cpu_lock so
   a later STx_C can test it, then do the 64-bit load.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
169

    
170
/* Emit a load of the form  Ra = mem[Rb + disp16].
   TCG_GEN_QEMU_LOAD picks the width/signedness, FP selects the FP
   register file, and CLEAR masks the low 3 address bits (LDQ_U).
   A load into $31/$f31 emits nothing at all.
   NOTE(review): skipping the access for ra == 31 also skips any fault
   the real access would take -- presumably intended here.  */
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        /* Rb == $31 reads as zero: the address is just the displacement.  */
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
197

    
198
/* Store a VAX F-format float: compress the register value t0 to its
   32-bit memory image and store it at *t1.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv image64 = tcg_temp_new();
    TCGv_i32 image32 = tcg_temp_new_i32();

    gen_helper_f_to_memory(image32, t0);
    tcg_gen_extu_i32_i64(image64, image32);
    tcg_gen_qemu_st32(image64, t1, flags);

    tcg_temp_free_i32(image32);
    tcg_temp_free(image64);
}
208

    
209
/* Store a VAX G-format float: convert the register value t0 to its
   64-bit memory image and store it at *t1.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv image = tcg_temp_new();

    gen_helper_g_to_memory(image, t0);
    tcg_gen_qemu_st64(image, t1, flags);

    tcg_temp_free(image);
}
216

    
217
/* Store an IEEE S-format float: compress the register value t0 to its
   32-bit memory image and store it at *t1.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv image64 = tcg_temp_new();
    TCGv_i32 image32 = tcg_temp_new_i32();

    gen_helper_s_to_memory(image32, t0);
    tcg_gen_extu_i32_i64(image64, image32);
    tcg_gen_qemu_st32(image64, t1, flags);

    tcg_temp_free_i32(image32);
    tcg_temp_free(image64);
}
227

    
228
/* STL_C: store-conditional longword.  The store succeeds (t0 = 1) only
   if the address recorded by a previous LDx_L still equals t1; otherwise
   t0 = 0.  The lock is cleared either way.
   NOTE(review): only the address is compared, not intervening writes --
   weaker than real hardware but apparently sufficient here.  */
static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    /* Any store-conditional loses the lock, success or failure.  */
    tcg_gen_movi_i64(cpu_lock, -1);
}
243

    
244
/* STQ_C: store-conditional quadword.  Same scheme as gen_qemu_stl_c:
   succeed (t0 = 1) only if cpu_lock still equals t1, clear the lock
   in either case.  */
static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    /* Any store-conditional loses the lock, success or failure.  */
    tcg_gen_movi_i64(cpu_lock, -1);
}
259

    
260
/* Emit a store of the form  mem[Rb + disp16] = Ra  (a zero is stored
   when Ra == $31).  FP selects the FP register file and CLEAR masks the
   low 3 address bits (STQ_U).  LOCAL requests local temps, needed when
   the store emitter itself branches (the STx_C emitters above), since
   ordinary temps are not live across a brcond.  */
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        /* Rb == $31 reads as zero: the address is just the displacement.  */
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        /* Storing from $31/$f31 writes a zero.  */
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
296

    
297
/* Conditional branch on integer register Ra.  With MASK set only bit 0
   is tested (BLBC/BLBS); otherwise the whole register is compared
   against zero with COND.  Both arms write cpu_pc explicitly; the
   caller has already advanced ctx->pc past this insn.  */
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    /* Not taken: fall through to the next insn.  */
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Taken: target is PC + 4*disp (disp counts instructions).  */
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
324

    
325
/* Floating-point conditional branch (FBEQ..FBGT).  The cmpf* helper
   writes a non-zero value into TMP when the condition on Fa holds;
   we then branch on that result.  Both arms write cpu_pc explicitly.  */
static inline void gen_fbcond(DisasContext *ctx, int opc, int ra, int32_t disp)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        /* $f31 reads as zero; use the constant both as source and result.  */
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    /* BUGFIX: tmp was never freed, leaking one TCG temp per translated
       FP branch (compare gen_bcond, which frees its temp).  */
    tcg_temp_free(tmp);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
369

    
370
/* CMOVxx: if the test on Ra FAILS (i.e. INV_COND holds), branch over the
   move; otherwise Rc = Rb (or the 8-bit literal).  MASK selects the
   bit-0-only test used by CMOVLBC/CMOVLBS.  */
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    /* A move into $31 has no effect.  */
    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
401

    
402
/* Emit a call to a one-operand FP helper:  Fc = helper(Fb).
   $f31 as input reads as zero; results destined for $f31 are dropped.  */
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
      return;                                               \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);    \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);            \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)
433

    
434
/* Emit a call to a two-operand FP helper:  Fc = helper(Fa, Fb).
   $f31 as either input reads as zero; results for $f31 are dropped.  */
#define FARITH3(name)                                                     \
static inline void glue(gen_f, name)(int ra, int rb, int rc)              \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                   \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}

FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)
484

    
485
#define FCMOV(name)                                                   \
486
static inline void glue(gen_f, name)(int ra, int rb, int rc)          \
487
{                                                                     \
488
    int l1;                                                           \
489
    TCGv tmp;                                                         \
490
                                                                      \
491
    if (unlikely(rc == 31))                                           \
492
        return;                                                       \
493
                                                                      \
494
    l1 = gen_new_label();                                             \
495
    tmp = tcg_temp_new();                                 \
496
    if (ra != 31) {                                                   \
497
        tmp = tcg_temp_new();                             \
498
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
499
    } else  {                                                         \
500
        tmp = tcg_const_i64(0);                                       \
501
        gen_helper_ ## name (tmp, tmp);                               \
502
    }                                                                 \
503
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
504
    if (rb != 31)                                                     \
505
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);                    \
506
    else                                                              \
507
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
508
    gen_set_label(l1);                                                \
509
}
510
FCMOV(cmpfeq)
511
FCMOV(cmpfne)
512
FCMOV(cmpflt)
513
FCMOV(cmpfge)
514
FCMOV(cmpfle)
515
FCMOV(cmpfgt)
516

    
517
/* Expand the 8-bit ZAPNOT byte selector into a 64-bit mask: bit i of
   LIT set means byte i of the result is 0xff.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int byte;

    /* Build from the top byte down so each shift makes room below.  */
    for (byte = 7; byte >= 0; byte--) {
        mask <<= 8;
        if (lit & (1 << byte)) {
            mask |= 0xffull;
        }
    }
    return mask;
}
528

    
529
/* Implement zapnot with an immediate operand, which expands to some
530
   form of immediate AND.  This is a basic building block in the
531
   definition of many of the other byte manipulation instructions.  */
532
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
533
{
534
    switch (lit) {
535
    case 0x00:
536
        tcg_gen_movi_i64(dest, 0);
537
        break;
538
    case 0x01:
539
        tcg_gen_ext8u_i64(dest, src);
540
        break;
541
    case 0x03:
542
        tcg_gen_ext16u_i64(dest, src);
543
        break;
544
    case 0x0f:
545
        tcg_gen_ext32u_i64(dest, src);
546
        break;
547
    case 0xff:
548
        tcg_gen_mov_i64(dest, src);
549
        break;
550
    default:
551
        tcg_gen_andi_i64 (dest, src, zapnot_mask (lit));
552
        break;
553
    }
554
}
555

    
556
/* ZAPNOT: keep the bytes of Ra selected by Rb (or the literal), zero
   the rest.  Writes to $31 are dropped; $31 as source yields zero.  */
static inline void gen_zapnot(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(ra == 31)) {
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    } else if (islit) {
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], lit);
    } else {
        gen_helper_zapnot(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }
}
567

    
568
/* ZAP: zero the bytes of Ra selected by Rb (or the literal) -- the
   complement of ZAPNOT, hence the inverted literal below.  */
static inline void gen_zap(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(ra == 31)) {
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    } else if (islit) {
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], ~lit);
    } else {
        gen_helper_zap(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }
}
579

    
580

    
581
/* EXTWH, EXTLH, EXTQH */
/* Extract the high part: shift Ra left by 64 - 8*(shift & 7) bits
   (mod 64), then zap down to BYTE_MASK.  */
static inline void gen_ext_h(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            /* (64 - 8*(lit & 7)) & 0x3f, so lit & 7 == 0 shifts by 0.  */
            lit = (64 - (lit & 7) * 8) & 0x3f;
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        } else {
            TCGv tmp1 = tcg_temp_new();
            /* Same computation at runtime: negate 8*(rb & 7) and mask
               to 6 bits instead of subtracting from 64, which would
               yield an out-of-range shift of 64 when rb & 7 == 0.  */
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        /* Keep only the bytes this width of EXT actually produces.  */
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
605

    
606
/* EXTBL, EXTWL, EXTLL, EXTQL */
/* Extract the low part: shift Ra right by 8*(shift & 7) bits, then
   zap down to BYTE_MASK.  */
static inline void gen_ext_l(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        /* Keep only the bytes this width of EXT actually produces.  */
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
627

    
628
/* INSBL, INSWL, INSLL, INSQL */
/* Insert low: zap Ra down to BYTE_MASK, then shift the result left by
   8*(shift & 7) bits into position.  */
static inline void gen_ins_l(int ra, int rb, int rc, int islit,
                             uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           the same number of byte slots as the data and apply the zap
           at the end.  This is equivalent to simply performing the zap
           first and shifting afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            tcg_gen_shli_i64(cpu_ir[rc], tmp, (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();
            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_shl_i64(cpu_ir[rc], tmp, shift);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
657

    
658
/* Code to call arith3 helpers:  Rc = helper(Ra, Rb-or-literal).
   $31 as a source reads as zero; writes to $31 are dropped.  */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(mskwl)
ARITH3(mskll)
ARITH3(mskql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
ARITH3(minub8)
ARITH3(minsb8)
ARITH3(minuw4)
ARITH3(minsw4)
ARITH3(maxub8)
ARITH3(maxsb8)
ARITH3(maxuw4)
ARITH3(maxsw4)
ARITH3(perr)

    
712
/* MVI two-operand byte ops:  Rc = helper(Rb).  Rb == $31 yields zero;
   writes to $31 are dropped.  */
#define MVIOP2(name)                                    \
static inline void glue(gen_, name)(int rb, int rc)     \
{                                                       \
    if (unlikely(rc == 31))                             \
        return;                                         \
    if (unlikely(rb == 31))                             \
        tcg_gen_movi_i64(cpu_ir[rc], 0);                \
    else                                                \
        gen_helper_ ## name (cpu_ir[rc], cpu_ir[rb]);   \
}
MVIOP2(pklb)
MVIOP2(pkwb)
MVIOP2(unpkbl)
MVIOP2(unpkbw)
726

    
727
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
728
                           uint8_t lit)
729
{
730
    int l1, l2;
731
    TCGv tmp;
732

    
733
    if (unlikely(rc == 31))
734
        return;
735

    
736
    l1 = gen_new_label();
737
    l2 = gen_new_label();
738

    
739
    if (ra != 31) {
740
        tmp = tcg_temp_new();
741
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
742
    } else
743
        tmp = tcg_const_i64(0);
744
    if (islit)
745
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
746
    else
747
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
748

    
749
    tcg_gen_movi_i64(cpu_ir[rc], 0);
750
    tcg_gen_br(l2);
751
    gen_set_label(l1);
752
    tcg_gen_movi_i64(cpu_ir[rc], 1);
753
    gen_set_label(l2);
754
}
755

    
756
static inline int translate_one(DisasContext *ctx, uint32_t insn)
757
{
758
    uint32_t palcode;
759
    int32_t disp21, disp16, disp12;
760
    uint16_t fn11, fn16;
761
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit, real_islit;
762
    uint8_t lit;
763
    int ret;
764

    
765
    /* Decode all instruction fields */
766
    opc = insn >> 26;
767
    ra = (insn >> 21) & 0x1F;
768
    rb = (insn >> 16) & 0x1F;
769
    rc = insn & 0x1F;
770
    sbz = (insn >> 13) & 0x07;
771
    real_islit = islit = (insn >> 12) & 1;
772
    if (rb == 31 && !islit) {
773
        islit = 1;
774
        lit = 0;
775
    } else
776
        lit = (insn >> 13) & 0xFF;
777
    palcode = insn & 0x03FFFFFF;
778
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
779
    disp16 = (int16_t)(insn & 0x0000FFFF);
780
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
781
    fn16 = insn & 0x0000FFFF;
782
    fn11 = (insn >> 5) & 0x000007FF;
783
    fpfn = fn11 & 0x3F;
784
    fn7 = (insn >> 5) & 0x0000007F;
785
    fn2 = (insn >> 5) & 0x00000003;
786
    ret = 0;
787
    LOG_DISAS("opc %02x ra %2d rb %2d rc %2d disp16 %6d\n",
788
              opc, ra, rb, rc, disp16);
789

    
790
    switch (opc) {
791
    case 0x00:
792
        /* CALL_PAL */
793
#ifdef CONFIG_USER_ONLY
794
        if (palcode == 0x9E) {
795
            /* RDUNIQUE */
796
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_uniq);
797
            break;
798
        } else if (palcode == 0x9F) {
799
            /* WRUNIQUE */
800
            tcg_gen_mov_i64(cpu_uniq, cpu_ir[IR_A0]);
801
            break;
802
        }
803
#endif
804
        if (palcode >= 0x80 && palcode < 0xC0) {
805
            /* Unprivileged PAL call */
806
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
807
            ret = 3;
808
            break;
809
        }
810
#ifndef CONFIG_USER_ONLY
811
        if (palcode < 0x40) {
812
            /* Privileged PAL code */
813
            if (ctx->mem_idx & 1)
814
                goto invalid_opc;
815
            gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
816
            ret = 3;
817
        }
818
#endif
819
        /* Invalid PAL call */
820
        goto invalid_opc;
821
    case 0x01:
822
        /* OPC01 */
823
        goto invalid_opc;
824
    case 0x02:
825
        /* OPC02 */
826
        goto invalid_opc;
827
    case 0x03:
828
        /* OPC03 */
829
        goto invalid_opc;
830
    case 0x04:
831
        /* OPC04 */
832
        goto invalid_opc;
833
    case 0x05:
834
        /* OPC05 */
835
        goto invalid_opc;
836
    case 0x06:
837
        /* OPC06 */
838
        goto invalid_opc;
839
    case 0x07:
840
        /* OPC07 */
841
        goto invalid_opc;
842
    case 0x08:
843
        /* LDA */
844
        if (likely(ra != 31)) {
845
            if (rb != 31)
846
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
847
            else
848
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
849
        }
850
        break;
851
    case 0x09:
852
        /* LDAH */
853
        if (likely(ra != 31)) {
854
            if (rb != 31)
855
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
856
            else
857
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
858
        }
859
        break;
860
    case 0x0A:
861
        /* LDBU */
862
        if (!(ctx->amask & AMASK_BWX))
863
            goto invalid_opc;
864
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
865
        break;
866
    case 0x0B:
867
        /* LDQ_U */
868
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
869
        break;
870
    case 0x0C:
871
        /* LDWU */
872
        if (!(ctx->amask & AMASK_BWX))
873
            goto invalid_opc;
874
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
875
        break;
876
    case 0x0D:
877
        /* STW */
878
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
879
        break;
880
    case 0x0E:
881
        /* STB */
882
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
883
        break;
884
    case 0x0F:
885
        /* STQ_U */
886
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
887
        break;
888
    case 0x10:
889
        switch (fn7) {
890
        case 0x00:
891
            /* ADDL */
892
            if (likely(rc != 31)) {
893
                if (ra != 31) {
894
                    if (islit) {
895
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
896
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
897
                    } else {
898
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
899
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
900
                    }
901
                } else {
902
                    if (islit)
903
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
904
                    else
905
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
906
                }
907
            }
908
            break;
909
        case 0x02:
910
            /* S4ADDL */
911
            if (likely(rc != 31)) {
912
                if (ra != 31) {
913
                    TCGv tmp = tcg_temp_new();
914
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
915
                    if (islit)
916
                        tcg_gen_addi_i64(tmp, tmp, lit);
917
                    else
918
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
919
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
920
                    tcg_temp_free(tmp);
921
                } else {
922
                    if (islit)
923
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
924
                    else
925
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
926
                }
927
            }
928
            break;
929
        case 0x09:
930
            /* SUBL */
931
            if (likely(rc != 31)) {
932
                if (ra != 31) {
933
                    if (islit)
934
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
935
                    else
936
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
937
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
938
                } else {
939
                    if (islit)
940
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
941
                    else {
942
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
943
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
944
                }
945
            }
946
            break;
947
        case 0x0B:
948
            /* S4SUBL */
949
            if (likely(rc != 31)) {
950
                if (ra != 31) {
951
                    TCGv tmp = tcg_temp_new();
952
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
953
                    if (islit)
954
                        tcg_gen_subi_i64(tmp, tmp, lit);
955
                    else
956
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
957
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
958
                    tcg_temp_free(tmp);
959
                } else {
960
                    if (islit)
961
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
962
                    else {
963
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
964
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
965
                    }
966
                }
967
            }
968
            break;
969
        case 0x0F:
970
            /* CMPBGE */
971
            gen_cmpbge(ra, rb, rc, islit, lit);
972
            break;
973
        case 0x12:
974
            /* S8ADDL */
975
            if (likely(rc != 31)) {
976
                if (ra != 31) {
977
                    TCGv tmp = tcg_temp_new();
978
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
979
                    if (islit)
980
                        tcg_gen_addi_i64(tmp, tmp, lit);
981
                    else
982
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
983
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
984
                    tcg_temp_free(tmp);
985
                } else {
986
                    if (islit)
987
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
988
                    else
989
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
990
                }
991
            }
992
            break;
993
        case 0x1B:
994
            /* S8SUBL */
995
            if (likely(rc != 31)) {
996
                if (ra != 31) {
997
                    TCGv tmp = tcg_temp_new();
998
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
999
                    if (islit)
1000
                        tcg_gen_subi_i64(tmp, tmp, lit);
1001
                    else
1002
                       tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
1003
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1004
                    tcg_temp_free(tmp);
1005
                } else {
1006
                    if (islit)
1007
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1008
                    else
1009
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1010
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1011
                    }
1012
                }
1013
            }
1014
            break;
1015
        case 0x1D:
1016
            /* CMPULT */
1017
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
1018
            break;
1019
        case 0x20:
1020
            /* ADDQ */
1021
            if (likely(rc != 31)) {
1022
                if (ra != 31) {
1023
                    if (islit)
1024
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1025
                    else
1026
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1027
                } else {
1028
                    if (islit)
1029
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1030
                    else
1031
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1032
                }
1033
            }
1034
            break;
1035
        case 0x22:
1036
            /* S4ADDQ */
1037
            if (likely(rc != 31)) {
1038
                if (ra != 31) {
1039
                    TCGv tmp = tcg_temp_new();
1040
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1041
                    if (islit)
1042
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1043
                    else
1044
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1045
                    tcg_temp_free(tmp);
1046
                } else {
1047
                    if (islit)
1048
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1049
                    else
1050
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1051
                }
1052
            }
1053
            break;
1054
        case 0x29:
1055
            /* SUBQ */
1056
            if (likely(rc != 31)) {
1057
                if (ra != 31) {
1058
                    if (islit)
1059
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1060
                    else
1061
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1062
                } else {
1063
                    if (islit)
1064
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1065
                    else
1066
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1067
                }
1068
            }
1069
            break;
1070
        case 0x2B:
1071
            /* S4SUBQ */
1072
            if (likely(rc != 31)) {
1073
                if (ra != 31) {
1074
                    TCGv tmp = tcg_temp_new();
1075
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1076
                    if (islit)
1077
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1078
                    else
1079
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1080
                    tcg_temp_free(tmp);
1081
                } else {
1082
                    if (islit)
1083
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1084
                    else
1085
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1086
                }
1087
            }
1088
            break;
1089
        case 0x2D:
1090
            /* CMPEQ */
1091
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
1092
            break;
1093
        case 0x32:
1094
            /* S8ADDQ */
1095
            if (likely(rc != 31)) {
1096
                if (ra != 31) {
1097
                    TCGv tmp = tcg_temp_new();
1098
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1099
                    if (islit)
1100
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1101
                    else
1102
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1103
                    tcg_temp_free(tmp);
1104
                } else {
1105
                    if (islit)
1106
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1107
                    else
1108
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1109
                }
1110
            }
1111
            break;
1112
        case 0x3B:
1113
            /* S8SUBQ */
1114
            if (likely(rc != 31)) {
1115
                if (ra != 31) {
1116
                    TCGv tmp = tcg_temp_new();
1117
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1118
                    if (islit)
1119
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1120
                    else
1121
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1122
                    tcg_temp_free(tmp);
1123
                } else {
1124
                    if (islit)
1125
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1126
                    else
1127
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1128
                }
1129
            }
1130
            break;
1131
        case 0x3D:
1132
            /* CMPULE */
1133
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1134
            break;
1135
        case 0x40:
1136
            /* ADDL/V */
1137
            gen_addlv(ra, rb, rc, islit, lit);
1138
            break;
1139
        case 0x49:
1140
            /* SUBL/V */
1141
            gen_sublv(ra, rb, rc, islit, lit);
1142
            break;
1143
        case 0x4D:
1144
            /* CMPLT */
1145
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1146
            break;
1147
        case 0x60:
1148
            /* ADDQ/V */
1149
            gen_addqv(ra, rb, rc, islit, lit);
1150
            break;
1151
        case 0x69:
1152
            /* SUBQ/V */
1153
            gen_subqv(ra, rb, rc, islit, lit);
1154
            break;
1155
        case 0x6D:
1156
            /* CMPLE */
1157
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1158
            break;
1159
        default:
1160
            goto invalid_opc;
1161
        }
1162
        break;
1163
    case 0x11:
1164
        switch (fn7) {
1165
        case 0x00:
1166
            /* AND */
1167
            if (likely(rc != 31)) {
1168
                if (ra == 31)
1169
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1170
                else if (islit)
1171
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1172
                else
1173
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1174
            }
1175
            break;
1176
        case 0x08:
1177
            /* BIC */
1178
            if (likely(rc != 31)) {
1179
                if (ra != 31) {
1180
                    if (islit)
1181
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1182
                    else
1183
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1184
                } else
1185
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1186
            }
1187
            break;
1188
        case 0x14:
1189
            /* CMOVLBS */
1190
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1191
            break;
1192
        case 0x16:
1193
            /* CMOVLBC */
1194
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1195
            break;
1196
        case 0x20:
1197
            /* BIS */
1198
            if (likely(rc != 31)) {
1199
                if (ra != 31) {
1200
                    if (islit)
1201
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1202
                    else
1203
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1204
                } else {
1205
                    if (islit)
1206
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1207
                    else
1208
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1209
                }
1210
            }
1211
            break;
1212
        case 0x24:
1213
            /* CMOVEQ */
1214
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1215
            break;
1216
        case 0x26:
1217
            /* CMOVNE */
1218
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1219
            break;
1220
        case 0x28:
1221
            /* ORNOT */
1222
            if (likely(rc != 31)) {
1223
                if (ra != 31) {
1224
                    if (islit)
1225
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1226
                    else
1227
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1228
                } else {
1229
                    if (islit)
1230
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1231
                    else
1232
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1233
                }
1234
            }
1235
            break;
1236
        case 0x40:
1237
            /* XOR */
1238
            if (likely(rc != 31)) {
1239
                if (ra != 31) {
1240
                    if (islit)
1241
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1242
                    else
1243
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1244
                } else {
1245
                    if (islit)
1246
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1247
                    else
1248
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1249
                }
1250
            }
1251
            break;
1252
        case 0x44:
1253
            /* CMOVLT */
1254
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1255
            break;
1256
        case 0x46:
1257
            /* CMOVGE */
1258
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1259
            break;
1260
        case 0x48:
1261
            /* EQV */
1262
            if (likely(rc != 31)) {
1263
                if (ra != 31) {
1264
                    if (islit)
1265
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1266
                    else
1267
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1268
                } else {
1269
                    if (islit)
1270
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1271
                    else
1272
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1273
                }
1274
            }
1275
            break;
1276
        case 0x61:
1277
            /* AMASK */
1278
            if (likely(rc != 31)) {
1279
                if (islit)
1280
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
1281
                else
1282
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1283
                switch (ctx->env->implver) {
1284
                case IMPLVER_2106x:
1285
                    /* EV4, EV45, LCA, LCA45 & EV5 */
1286
                    break;
1287
                case IMPLVER_21164:
1288
                case IMPLVER_21264:
1289
                case IMPLVER_21364:
1290
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
1291
                                     ~(uint64_t)ctx->amask);
1292
                    break;
1293
                }
1294
            }
1295
            break;
1296
        case 0x64:
1297
            /* CMOVLE */
1298
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1299
            break;
1300
        case 0x66:
1301
            /* CMOVGT */
1302
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1303
            break;
1304
        case 0x6C:
1305
            /* IMPLVER */
1306
            if (rc != 31)
1307
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
1308
            break;
1309
        default:
1310
            goto invalid_opc;
1311
        }
1312
        break;
1313
    case 0x12:
1314
        switch (fn7) {
1315
        case 0x02:
1316
            /* MSKBL */
1317
            gen_mskbl(ra, rb, rc, islit, lit);
1318
            break;
1319
        case 0x06:
1320
            /* EXTBL */
1321
            gen_ext_l(ra, rb, rc, islit, lit, 0x01);
1322
            break;
1323
        case 0x0B:
1324
            /* INSBL */
1325
            gen_ins_l(ra, rb, rc, islit, lit, 0x01);
1326
            break;
1327
        case 0x12:
1328
            /* MSKWL */
1329
            gen_mskwl(ra, rb, rc, islit, lit);
1330
            break;
1331
        case 0x16:
1332
            /* EXTWL */
1333
            gen_ext_l(ra, rb, rc, islit, lit, 0x03);
1334
            break;
1335
        case 0x1B:
1336
            /* INSWL */
1337
            gen_ins_l(ra, rb, rc, islit, lit, 0x03);
1338
            break;
1339
        case 0x22:
1340
            /* MSKLL */
1341
            gen_mskll(ra, rb, rc, islit, lit);
1342
            break;
1343
        case 0x26:
1344
            /* EXTLL */
1345
            gen_ext_l(ra, rb, rc, islit, lit, 0x0f);
1346
            break;
1347
        case 0x2B:
1348
            /* INSLL */
1349
            gen_ins_l(ra, rb, rc, islit, lit, 0x0f);
1350
            break;
1351
        case 0x30:
1352
            /* ZAP */
1353
            gen_zap(ra, rb, rc, islit, lit);
1354
            break;
1355
        case 0x31:
1356
            /* ZAPNOT */
1357
            gen_zapnot(ra, rb, rc, islit, lit);
1358
            break;
1359
        case 0x32:
1360
            /* MSKQL */
1361
            gen_mskql(ra, rb, rc, islit, lit);
1362
            break;
1363
        case 0x34:
1364
            /* SRL */
1365
            if (likely(rc != 31)) {
1366
                if (ra != 31) {
1367
                    if (islit)
1368
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1369
                    else {
1370
                        TCGv shift = tcg_temp_new();
1371
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1372
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1373
                        tcg_temp_free(shift);
1374
                    }
1375
                } else
1376
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1377
            }
1378
            break;
1379
        case 0x36:
1380
            /* EXTQL */
1381
            gen_ext_l(ra, rb, rc, islit, lit, 0xff);
1382
            break;
1383
        case 0x39:
1384
            /* SLL */
1385
            if (likely(rc != 31)) {
1386
                if (ra != 31) {
1387
                    if (islit)
1388
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1389
                    else {
1390
                        TCGv shift = tcg_temp_new();
1391
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1392
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1393
                        tcg_temp_free(shift);
1394
                    }
1395
                } else
1396
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1397
            }
1398
            break;
1399
        case 0x3B:
1400
            /* INSQL */
1401
            gen_ins_l(ra, rb, rc, islit, lit, 0xff);
1402
            break;
1403
        case 0x3C:
1404
            /* SRA */
1405
            if (likely(rc != 31)) {
1406
                if (ra != 31) {
1407
                    if (islit)
1408
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1409
                    else {
1410
                        TCGv shift = tcg_temp_new();
1411
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1412
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1413
                        tcg_temp_free(shift);
1414
                    }
1415
                } else
1416
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1417
            }
1418
            break;
1419
        case 0x52:
1420
            /* MSKWH */
1421
            gen_mskwh(ra, rb, rc, islit, lit);
1422
            break;
1423
        case 0x57:
1424
            /* INSWH */
1425
            gen_inswh(ra, rb, rc, islit, lit);
1426
            break;
1427
        case 0x5A:
1428
            /* EXTWH */
1429
            gen_ext_h(ra, rb, rc, islit, lit, 0x03);
1430
            break;
1431
        case 0x62:
1432
            /* MSKLH */
1433
            gen_msklh(ra, rb, rc, islit, lit);
1434
            break;
1435
        case 0x67:
1436
            /* INSLH */
1437
            gen_inslh(ra, rb, rc, islit, lit);
1438
            break;
1439
        case 0x6A:
1440
            /* EXTLH */
1441
            gen_ext_h(ra, rb, rc, islit, lit, 0x0f);
1442
            break;
1443
        case 0x72:
1444
            /* MSKQH */
1445
            gen_mskqh(ra, rb, rc, islit, lit);
1446
            break;
1447
        case 0x77:
1448
            /* INSQH */
1449
            gen_insqh(ra, rb, rc, islit, lit);
1450
            break;
1451
        case 0x7A:
1452
            /* EXTQH */
1453
            gen_ext_h(ra, rb, rc, islit, lit, 0xff);
1454
            break;
1455
        default:
1456
            goto invalid_opc;
1457
        }
1458
        break;
1459
    case 0x13:
1460
        switch (fn7) {
1461
        case 0x00:
1462
            /* MULL */
1463
            if (likely(rc != 31)) {
1464
                if (ra == 31)
1465
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1466
                else {
1467
                    if (islit)
1468
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1469
                    else
1470
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1471
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1472
                }
1473
            }
1474
            break;
1475
        case 0x20:
1476
            /* MULQ */
1477
            if (likely(rc != 31)) {
1478
                if (ra == 31)
1479
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1480
                else if (islit)
1481
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1482
                else
1483
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1484
            }
1485
            break;
1486
        case 0x30:
1487
            /* UMULH */
1488
            gen_umulh(ra, rb, rc, islit, lit);
1489
            break;
1490
        case 0x40:
1491
            /* MULL/V */
1492
            gen_mullv(ra, rb, rc, islit, lit);
1493
            break;
1494
        case 0x60:
1495
            /* MULQ/V */
1496
            gen_mulqv(ra, rb, rc, islit, lit);
1497
            break;
1498
        default:
1499
            goto invalid_opc;
1500
        }
1501
        break;
1502
    case 0x14:
1503
        switch (fpfn) { /* f11 & 0x3F */
1504
        case 0x04:
1505
            /* ITOFS */
1506
            if (!(ctx->amask & AMASK_FIX))
1507
                goto invalid_opc;
1508
            if (likely(rc != 31)) {
1509
                if (ra != 31) {
1510
                    TCGv_i32 tmp = tcg_temp_new_i32();
1511
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1512
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1513
                    tcg_temp_free_i32(tmp);
1514
                } else
1515
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1516
            }
1517
            break;
1518
        case 0x0A:
1519
            /* SQRTF */
1520
            if (!(ctx->amask & AMASK_FIX))
1521
                goto invalid_opc;
1522
            gen_fsqrtf(rb, rc);
1523
            break;
1524
        case 0x0B:
1525
            /* SQRTS */
1526
            if (!(ctx->amask & AMASK_FIX))
1527
                goto invalid_opc;
1528
            gen_fsqrts(rb, rc);
1529
            break;
1530
        case 0x14:
1531
            /* ITOFF */
1532
            if (!(ctx->amask & AMASK_FIX))
1533
                goto invalid_opc;
1534
            if (likely(rc != 31)) {
1535
                if (ra != 31) {
1536
                    TCGv_i32 tmp = tcg_temp_new_i32();
1537
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1538
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1539
                    tcg_temp_free_i32(tmp);
1540
                } else
1541
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1542
            }
1543
            break;
1544
        case 0x24:
1545
            /* ITOFT */
1546
            if (!(ctx->amask & AMASK_FIX))
1547
                goto invalid_opc;
1548
            if (likely(rc != 31)) {
1549
                if (ra != 31)
1550
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1551
                else
1552
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1553
            }
1554
            break;
1555
        case 0x2A:
1556
            /* SQRTG */
1557
            if (!(ctx->amask & AMASK_FIX))
1558
                goto invalid_opc;
1559
            gen_fsqrtg(rb, rc);
1560
            break;
1561
        case 0x02B:
1562
            /* SQRTT */
1563
            if (!(ctx->amask & AMASK_FIX))
1564
                goto invalid_opc;
1565
            gen_fsqrtt(rb, rc);
1566
            break;
1567
        default:
1568
            goto invalid_opc;
1569
        }
1570
        break;
1571
    case 0x15:
1572
        /* VAX floating point */
1573
        /* XXX: rounding mode and trap are ignored (!) */
1574
        switch (fpfn) { /* f11 & 0x3F */
1575
        case 0x00:
1576
            /* ADDF */
1577
            gen_faddf(ra, rb, rc);
1578
            break;
1579
        case 0x01:
1580
            /* SUBF */
1581
            gen_fsubf(ra, rb, rc);
1582
            break;
1583
        case 0x02:
1584
            /* MULF */
1585
            gen_fmulf(ra, rb, rc);
1586
            break;
1587
        case 0x03:
1588
            /* DIVF */
1589
            gen_fdivf(ra, rb, rc);
1590
            break;
1591
        case 0x1E:
1592
            /* CVTDG */
1593
#if 0 // TODO
1594
            gen_fcvtdg(rb, rc);
1595
#else
1596
            goto invalid_opc;
1597
#endif
1598
            break;
1599
        case 0x20:
1600
            /* ADDG */
1601
            gen_faddg(ra, rb, rc);
1602
            break;
1603
        case 0x21:
1604
            /* SUBG */
1605
            gen_fsubg(ra, rb, rc);
1606
            break;
1607
        case 0x22:
1608
            /* MULG */
1609
            gen_fmulg(ra, rb, rc);
1610
            break;
1611
        case 0x23:
1612
            /* DIVG */
1613
            gen_fdivg(ra, rb, rc);
1614
            break;
1615
        case 0x25:
1616
            /* CMPGEQ */
1617
            gen_fcmpgeq(ra, rb, rc);
1618
            break;
1619
        case 0x26:
1620
            /* CMPGLT */
1621
            gen_fcmpglt(ra, rb, rc);
1622
            break;
1623
        case 0x27:
1624
            /* CMPGLE */
1625
            gen_fcmpgle(ra, rb, rc);
1626
            break;
1627
        case 0x2C:
1628
            /* CVTGF */
1629
            gen_fcvtgf(rb, rc);
1630
            break;
1631
        case 0x2D:
1632
            /* CVTGD */
1633
#if 0 // TODO
1634
            gen_fcvtgd(rb, rc);
1635
#else
1636
            goto invalid_opc;
1637
#endif
1638
            break;
1639
        case 0x2F:
1640
            /* CVTGQ */
1641
            gen_fcvtgq(rb, rc);
1642
            break;
1643
        case 0x3C:
1644
            /* CVTQF */
1645
            gen_fcvtqf(rb, rc);
1646
            break;
1647
        case 0x3E:
1648
            /* CVTQG */
1649
            gen_fcvtqg(rb, rc);
1650
            break;
1651
        default:
1652
            goto invalid_opc;
1653
        }
1654
        break;
1655
    case 0x16:
1656
        /* IEEE floating-point */
1657
        /* XXX: rounding mode and traps are ignored (!) */
1658
        switch (fpfn) { /* f11 & 0x3F */
1659
        case 0x00:
1660
            /* ADDS */
1661
            gen_fadds(ra, rb, rc);
1662
            break;
1663
        case 0x01:
1664
            /* SUBS */
1665
            gen_fsubs(ra, rb, rc);
1666
            break;
1667
        case 0x02:
1668
            /* MULS */
1669
            gen_fmuls(ra, rb, rc);
1670
            break;
1671
        case 0x03:
1672
            /* DIVS */
1673
            gen_fdivs(ra, rb, rc);
1674
            break;
1675
        case 0x20:
1676
            /* ADDT */
1677
            gen_faddt(ra, rb, rc);
1678
            break;
1679
        case 0x21:
1680
            /* SUBT */
1681
            gen_fsubt(ra, rb, rc);
1682
            break;
1683
        case 0x22:
1684
            /* MULT */
1685
            gen_fmult(ra, rb, rc);
1686
            break;
1687
        case 0x23:
1688
            /* DIVT */
1689
            gen_fdivt(ra, rb, rc);
1690
            break;
1691
        case 0x24:
1692
            /* CMPTUN */
1693
            gen_fcmptun(ra, rb, rc);
1694
            break;
1695
        case 0x25:
1696
            /* CMPTEQ */
1697
            gen_fcmpteq(ra, rb, rc);
1698
            break;
1699
        case 0x26:
1700
            /* CMPTLT */
1701
            gen_fcmptlt(ra, rb, rc);
1702
            break;
1703
        case 0x27:
1704
            /* CMPTLE */
1705
            gen_fcmptle(ra, rb, rc);
1706
            break;
1707
        case 0x2C:
1708
            /* XXX: incorrect */
1709
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
1710
                /* CVTST */
1711
                gen_fcvtst(rb, rc);
1712
            } else {
1713
                /* CVTTS */
1714
                gen_fcvtts(rb, rc);
1715
            }
1716
            break;
1717
        case 0x2F:
1718
            /* CVTTQ */
1719
            gen_fcvttq(rb, rc);
1720
            break;
1721
        case 0x3C:
1722
            /* CVTQS */
1723
            gen_fcvtqs(rb, rc);
1724
            break;
1725
        case 0x3E:
1726
            /* CVTQT */
1727
            gen_fcvtqt(rb, rc);
1728
            break;
1729
        default:
1730
            goto invalid_opc;
1731
        }
1732
        break;
1733
    case 0x17:
1734
        switch (fn11) {
1735
        case 0x010:
1736
            /* CVTLQ */
1737
            gen_fcvtlq(rb, rc);
1738
            break;
1739
        case 0x020:
1740
            if (likely(rc != 31)) {
1741
                if (ra == rb)
1742
                    /* FMOV */
1743
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1744
                else
1745
                    /* CPYS */
1746
                    gen_fcpys(ra, rb, rc);
1747
            }
1748
            break;
1749
        case 0x021:
1750
            /* CPYSN */
1751
            gen_fcpysn(ra, rb, rc);
1752
            break;
1753
        case 0x022:
1754
            /* CPYSE */
1755
            gen_fcpyse(ra, rb, rc);
1756
            break;
1757
        case 0x024:
1758
            /* MT_FPCR */
1759
            if (likely(ra != 31))
1760
                gen_helper_store_fpcr(cpu_fir[ra]);
1761
            else {
1762
                TCGv tmp = tcg_const_i64(0);
1763
                gen_helper_store_fpcr(tmp);
1764
                tcg_temp_free(tmp);
1765
            }
1766
            break;
1767
        case 0x025:
1768
            /* MF_FPCR */
1769
            if (likely(ra != 31))
1770
                gen_helper_load_fpcr(cpu_fir[ra]);
1771
            break;
1772
        case 0x02A:
1773
            /* FCMOVEQ */
1774
            gen_fcmpfeq(ra, rb, rc);
1775
            break;
1776
        case 0x02B:
1777
            /* FCMOVNE */
1778
            gen_fcmpfne(ra, rb, rc);
1779
            break;
1780
        case 0x02C:
1781
            /* FCMOVLT */
1782
            gen_fcmpflt(ra, rb, rc);
1783
            break;
1784
        case 0x02D:
1785
            /* FCMOVGE */
1786
            gen_fcmpfge(ra, rb, rc);
1787
            break;
1788
        case 0x02E:
1789
            /* FCMOVLE */
1790
            gen_fcmpfle(ra, rb, rc);
1791
            break;
1792
        case 0x02F:
1793
            /* FCMOVGT */
1794
            gen_fcmpfgt(ra, rb, rc);
1795
            break;
1796
        case 0x030:
1797
            /* CVTQL */
1798
            gen_fcvtql(rb, rc);
1799
            break;
1800
        case 0x130:
1801
            /* CVTQL/V */
1802
            gen_fcvtqlv(rb, rc);
1803
            break;
1804
        case 0x530:
1805
            /* CVTQL/SV */
1806
            gen_fcvtqlsv(rb, rc);
1807
            break;
1808
        default:
1809
            goto invalid_opc;
1810
        }
1811
        break;
1812
    case 0x18:
1813
        switch ((uint16_t)disp16) {
1814
        case 0x0000:
1815
            /* TRAPB */
1816
            /* No-op. Just exit from the current tb */
1817
            ret = 2;
1818
            break;
1819
        case 0x0400:
1820
            /* EXCB */
1821
            /* No-op. Just exit from the current tb */
1822
            ret = 2;
1823
            break;
1824
        case 0x4000:
1825
            /* MB */
1826
            /* No-op */
1827
            break;
1828
        case 0x4400:
1829
            /* WMB */
1830
            /* No-op */
1831
            break;
1832
        case 0x8000:
1833
            /* FETCH */
1834
            /* No-op */
1835
            break;
1836
        case 0xA000:
1837
            /* FETCH_M */
1838
            /* No-op */
1839
            break;
1840
        case 0xC000:
1841
            /* RPCC */
1842
            if (ra != 31)
1843
                gen_helper_load_pcc(cpu_ir[ra]);
1844
            break;
1845
        case 0xE000:
1846
            /* RC */
1847
            if (ra != 31)
1848
                gen_helper_rc(cpu_ir[ra]);
1849
            break;
1850
        case 0xE800:
1851
            /* ECB */
1852
            break;
1853
        case 0xF000:
1854
            /* RS */
1855
            if (ra != 31)
1856
                gen_helper_rs(cpu_ir[ra]);
1857
            break;
1858
        case 0xF800:
1859
            /* WH64 */
1860
            /* No-op */
1861
            break;
1862
        default:
1863
            goto invalid_opc;
1864
        }
1865
        break;
1866
    case 0x19:
1867
        /* HW_MFPR (PALcode) */
1868
#if defined (CONFIG_USER_ONLY)
1869
        goto invalid_opc;
1870
#else
1871
        if (!ctx->pal_mode)
1872
            goto invalid_opc;
1873
        if (ra != 31) {
1874
            TCGv tmp = tcg_const_i32(insn & 0xFF);
1875
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
1876
            tcg_temp_free(tmp);
1877
        }
1878
        break;
1879
#endif
1880
    case 0x1A:
1881
        if (rb != 31)
1882
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1883
        else
1884
            tcg_gen_movi_i64(cpu_pc, 0);
1885
        if (ra != 31)
1886
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1887
        /* Those four jumps only differ by the branch prediction hint */
1888
        switch (fn2) {
1889
        case 0x0:
1890
            /* JMP */
1891
            break;
1892
        case 0x1:
1893
            /* JSR */
1894
            break;
1895
        case 0x2:
1896
            /* RET */
1897
            break;
1898
        case 0x3:
1899
            /* JSR_COROUTINE */
1900
            break;
1901
        }
1902
        ret = 1;
1903
        break;
1904
    case 0x1B:
1905
        /* HW_LD (PALcode) */
1906
#if defined (CONFIG_USER_ONLY)
1907
        goto invalid_opc;
1908
#else
1909
        if (!ctx->pal_mode)
1910
            goto invalid_opc;
1911
        if (ra != 31) {
1912
            TCGv addr = tcg_temp_new();
1913
            if (rb != 31)
1914
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
1915
            else
1916
                tcg_gen_movi_i64(addr, disp12);
1917
            switch ((insn >> 12) & 0xF) {
1918
            case 0x0:
1919
                /* Longword physical access (hw_ldl/p) */
1920
                gen_helper_ldl_raw(cpu_ir[ra], addr);
1921
                break;
1922
            case 0x1:
1923
                /* Quadword physical access (hw_ldq/p) */
1924
                gen_helper_ldq_raw(cpu_ir[ra], addr);
1925
                break;
1926
            case 0x2:
1927
                /* Longword physical access with lock (hw_ldl_l/p) */
1928
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
1929
                break;
1930
            case 0x3:
1931
                /* Quadword physical access with lock (hw_ldq_l/p) */
1932
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
1933
                break;
1934
            case 0x4:
1935
                /* Longword virtual PTE fetch (hw_ldl/v) */
1936
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
1937
                break;
1938
            case 0x5:
1939
                /* Quadword virtual PTE fetch (hw_ldq/v) */
1940
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
1941
                break;
1942
            case 0x6:
1943
                /* Incpu_ir[ra]id */
1944
                goto invalid_opc;
1945
            case 0x7:
1946
                /* Incpu_ir[ra]id */
1947
                goto invalid_opc;
1948
            case 0x8:
1949
                /* Longword virtual access (hw_ldl) */
1950
                gen_helper_st_virt_to_phys(addr, addr);
1951
                gen_helper_ldl_raw(cpu_ir[ra], addr);
1952
                break;
1953
            case 0x9:
1954
                /* Quadword virtual access (hw_ldq) */
1955
                gen_helper_st_virt_to_phys(addr, addr);
1956
                gen_helper_ldq_raw(cpu_ir[ra], addr);
1957
                break;
1958
            case 0xA:
1959
                /* Longword virtual access with protection check (hw_ldl/w) */
1960
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
1961
                break;
1962
            case 0xB:
1963
                /* Quadword virtual access with protection check (hw_ldq/w) */
1964
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
1965
                break;
1966
            case 0xC:
1967
                /* Longword virtual access with alt access mode (hw_ldl/a)*/
1968
                gen_helper_set_alt_mode();
1969
                gen_helper_st_virt_to_phys(addr, addr);
1970
                gen_helper_ldl_raw(cpu_ir[ra], addr);
1971
                gen_helper_restore_mode();
1972
                break;
1973
            case 0xD:
1974
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
1975
                gen_helper_set_alt_mode();
1976
                gen_helper_st_virt_to_phys(addr, addr);
1977
                gen_helper_ldq_raw(cpu_ir[ra], addr);
1978
                gen_helper_restore_mode();
1979
                break;
1980
            case 0xE:
1981
                /* Longword virtual access with alternate access mode and
1982
                 * protection checks (hw_ldl/wa)
1983
                 */
1984
                gen_helper_set_alt_mode();
1985
                gen_helper_ldl_data(cpu_ir[ra], addr);
1986
                gen_helper_restore_mode();
1987
                break;
1988
            case 0xF:
1989
                /* Quadword virtual access with alternate access mode and
1990
                 * protection checks (hw_ldq/wa)
1991
                 */
1992
                gen_helper_set_alt_mode();
1993
                gen_helper_ldq_data(cpu_ir[ra], addr);
1994
                gen_helper_restore_mode();
1995
                break;
1996
            }
1997
            tcg_temp_free(addr);
1998
        }
1999
        break;
2000
#endif
2001
    case 0x1C:
2002
        switch (fn7) {
2003
        case 0x00:
2004
            /* SEXTB */
2005
            if (!(ctx->amask & AMASK_BWX))
2006
                goto invalid_opc;
2007
            if (likely(rc != 31)) {
2008
                if (islit)
2009
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
2010
                else
2011
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
2012
            }
2013
            break;
2014
        case 0x01:
2015
            /* SEXTW */
2016
            if (!(ctx->amask & AMASK_BWX))
2017
                goto invalid_opc;
2018
            if (likely(rc != 31)) {
2019
                if (islit)
2020
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
2021
                else
2022
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
2023
            }
2024
            break;
2025
        case 0x30:
2026
            /* CTPOP */
2027
            if (!(ctx->amask & AMASK_CIX))
2028
                goto invalid_opc;
2029
            if (likely(rc != 31)) {
2030
                if (islit)
2031
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
2032
                else
2033
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
2034
            }
2035
            break;
2036
        case 0x31:
2037
            /* PERR */
2038
            if (!(ctx->amask & AMASK_MVI))
2039
                goto invalid_opc;
2040
            gen_perr(ra, rb, rc, islit, lit);
2041
            break;
2042
        case 0x32:
2043
            /* CTLZ */
2044
            if (!(ctx->amask & AMASK_CIX))
2045
                goto invalid_opc;
2046
            if (likely(rc != 31)) {
2047
                if (islit)
2048
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
2049
                else
2050
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
2051
            }
2052
            break;
2053
        case 0x33:
2054
            /* CTTZ */
2055
            if (!(ctx->amask & AMASK_CIX))
2056
                goto invalid_opc;
2057
            if (likely(rc != 31)) {
2058
                if (islit)
2059
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
2060
                else
2061
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
2062
            }
2063
            break;
2064
        case 0x34:
2065
            /* UNPKBW */
2066
            if (!(ctx->amask & AMASK_MVI))
2067
                goto invalid_opc;
2068
            if (real_islit || ra != 31)
2069
                goto invalid_opc;
2070
            gen_unpkbw (rb, rc);
2071
            break;
2072
        case 0x35:
2073
            /* UNPKBL */
2074
            if (!(ctx->amask & AMASK_MVI))
2075
                goto invalid_opc;
2076
            if (real_islit || ra != 31)
2077
                goto invalid_opc;
2078
            gen_unpkbl (rb, rc);
2079
            break;
2080
        case 0x36:
2081
            /* PKWB */
2082
            if (!(ctx->amask & AMASK_MVI))
2083
                goto invalid_opc;
2084
            if (real_islit || ra != 31)
2085
                goto invalid_opc;
2086
            gen_pkwb (rb, rc);
2087
            break;
2088
        case 0x37:
2089
            /* PKLB */
2090
            if (!(ctx->amask & AMASK_MVI))
2091
                goto invalid_opc;
2092
            if (real_islit || ra != 31)
2093
                goto invalid_opc;
2094
            gen_pklb (rb, rc);
2095
            break;
2096
        case 0x38:
2097
            /* MINSB8 */
2098
            if (!(ctx->amask & AMASK_MVI))
2099
                goto invalid_opc;
2100
            gen_minsb8 (ra, rb, rc, islit, lit);
2101
            break;
2102
        case 0x39:
2103
            /* MINSW4 */
2104
            if (!(ctx->amask & AMASK_MVI))
2105
                goto invalid_opc;
2106
            gen_minsw4 (ra, rb, rc, islit, lit);
2107
            break;
2108
        case 0x3A:
2109
            /* MINUB8 */
2110
            if (!(ctx->amask & AMASK_MVI))
2111
                goto invalid_opc;
2112
            gen_minub8 (ra, rb, rc, islit, lit);
2113
            break;
2114
        case 0x3B:
2115
            /* MINUW4 */
2116
            if (!(ctx->amask & AMASK_MVI))
2117
                goto invalid_opc;
2118
            gen_minuw4 (ra, rb, rc, islit, lit);
2119
            break;
2120
        case 0x3C:
2121
            /* MAXUB8 */
2122
            if (!(ctx->amask & AMASK_MVI))
2123
                goto invalid_opc;
2124
            gen_maxub8 (ra, rb, rc, islit, lit);
2125
            break;
2126
        case 0x3D:
2127
            /* MAXUW4 */
2128
            if (!(ctx->amask & AMASK_MVI))
2129
                goto invalid_opc;
2130
            gen_maxuw4 (ra, rb, rc, islit, lit);
2131
            break;
2132
        case 0x3E:
2133
            /* MAXSB8 */
2134
            if (!(ctx->amask & AMASK_MVI))
2135
                goto invalid_opc;
2136
            gen_maxsb8 (ra, rb, rc, islit, lit);
2137
            break;
2138
        case 0x3F:
2139
            /* MAXSW4 */
2140
            if (!(ctx->amask & AMASK_MVI))
2141
                goto invalid_opc;
2142
            gen_maxsw4 (ra, rb, rc, islit, lit);
2143
            break;
2144
        case 0x70:
2145
            /* FTOIT */
2146
            if (!(ctx->amask & AMASK_FIX))
2147
                goto invalid_opc;
2148
            if (likely(rc != 31)) {
2149
                if (ra != 31)
2150
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
2151
                else
2152
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2153
            }
2154
            break;
2155
        case 0x78:
2156
            /* FTOIS */
2157
            if (!(ctx->amask & AMASK_FIX))
2158
                goto invalid_opc;
2159
            if (rc != 31) {
2160
                TCGv_i32 tmp1 = tcg_temp_new_i32();
2161
                if (ra != 31)
2162
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
2163
                else {
2164
                    TCGv tmp2 = tcg_const_i64(0);
2165
                    gen_helper_s_to_memory(tmp1, tmp2);
2166
                    tcg_temp_free(tmp2);
2167
                }
2168
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
2169
                tcg_temp_free_i32(tmp1);
2170
            }
2171
            break;
2172
        default:
2173
            goto invalid_opc;
2174
        }
2175
        break;
2176
    case 0x1D:
2177
        /* HW_MTPR (PALcode) */
2178
#if defined (CONFIG_USER_ONLY)
2179
        goto invalid_opc;
2180
#else
2181
        if (!ctx->pal_mode)
2182
            goto invalid_opc;
2183
        else {
2184
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
2185
            if (ra != 31)
2186
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
2187
            else {
2188
                TCGv tmp2 = tcg_const_i64(0);
2189
                gen_helper_mtpr(tmp1, tmp2);
2190
                tcg_temp_free(tmp2);
2191
            }
2192
            tcg_temp_free(tmp1);
2193
            ret = 2;
2194
        }
2195
        break;
2196
#endif
2197
    case 0x1E:
2198
        /* HW_REI (PALcode) */
2199
#if defined (CONFIG_USER_ONLY)
2200
        goto invalid_opc;
2201
#else
2202
        if (!ctx->pal_mode)
2203
            goto invalid_opc;
2204
        if (rb == 31) {
2205
            /* "Old" alpha */
2206
            gen_helper_hw_rei();
2207
        } else {
2208
            TCGv tmp;
2209

    
2210
            if (ra != 31) {
2211
                tmp = tcg_temp_new();
2212
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
2213
            } else
2214
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
2215
            gen_helper_hw_ret(tmp);
2216
            tcg_temp_free(tmp);
2217
        }
2218
        ret = 2;
2219
        break;
2220
#endif
2221
    case 0x1F:
2222
        /* HW_ST (PALcode) */
2223
#if defined (CONFIG_USER_ONLY)
2224
        goto invalid_opc;
2225
#else
2226
        if (!ctx->pal_mode)
2227
            goto invalid_opc;
2228
        else {
2229
            TCGv addr, val;
2230
            addr = tcg_temp_new();
2231
            if (rb != 31)
2232
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2233
            else
2234
                tcg_gen_movi_i64(addr, disp12);
2235
            if (ra != 31)
2236
                val = cpu_ir[ra];
2237
            else {
2238
                val = tcg_temp_new();
2239
                tcg_gen_movi_i64(val, 0);
2240
            }
2241
            switch ((insn >> 12) & 0xF) {
2242
            case 0x0:
2243
                /* Longword physical access */
2244
                gen_helper_stl_raw(val, addr);
2245
                break;
2246
            case 0x1:
2247
                /* Quadword physical access */
2248
                gen_helper_stq_raw(val, addr);
2249
                break;
2250
            case 0x2:
2251
                /* Longword physical access with lock */
2252
                gen_helper_stl_c_raw(val, val, addr);
2253
                break;
2254
            case 0x3:
2255
                /* Quadword physical access with lock */
2256
                gen_helper_stq_c_raw(val, val, addr);
2257
                break;
2258
            case 0x4:
2259
                /* Longword virtual access */
2260
                gen_helper_st_virt_to_phys(addr, addr);
2261
                gen_helper_stl_raw(val, addr);
2262
                break;
2263
            case 0x5:
2264
                /* Quadword virtual access */
2265
                gen_helper_st_virt_to_phys(addr, addr);
2266
                gen_helper_stq_raw(val, addr);
2267
                break;
2268
            case 0x6:
2269
                /* Invalid */
2270
                goto invalid_opc;
2271
            case 0x7:
2272
                /* Invalid */
2273
                goto invalid_opc;
2274
            case 0x8:
2275
                /* Invalid */
2276
                goto invalid_opc;
2277
            case 0x9:
2278
                /* Invalid */
2279
                goto invalid_opc;
2280
            case 0xA:
2281
                /* Invalid */
2282
                goto invalid_opc;
2283
            case 0xB:
2284
                /* Invalid */
2285
                goto invalid_opc;
2286
            case 0xC:
2287
                /* Longword virtual access with alternate access mode */
2288
                gen_helper_set_alt_mode();
2289
                gen_helper_st_virt_to_phys(addr, addr);
2290
                gen_helper_stl_raw(val, addr);
2291
                gen_helper_restore_mode();
2292
                break;
2293
            case 0xD:
2294
                /* Quadword virtual access with alternate access mode */
2295
                gen_helper_set_alt_mode();
2296
                gen_helper_st_virt_to_phys(addr, addr);
2297
                gen_helper_stl_raw(val, addr);
2298
                gen_helper_restore_mode();
2299
                break;
2300
            case 0xE:
2301
                /* Invalid */
2302
                goto invalid_opc;
2303
            case 0xF:
2304
                /* Invalid */
2305
                goto invalid_opc;
2306
            }
2307
            if (ra == 31)
2308
                tcg_temp_free(val);
2309
            tcg_temp_free(addr);
2310
        }
2311
        break;
2312
#endif
2313
    case 0x20:
2314
        /* LDF */
2315
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2316
        break;
2317
    case 0x21:
2318
        /* LDG */
2319
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2320
        break;
2321
    case 0x22:
2322
        /* LDS */
2323
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2324
        break;
2325
    case 0x23:
2326
        /* LDT */
2327
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2328
        break;
2329
    case 0x24:
2330
        /* STF */
2331
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
2332
        break;
2333
    case 0x25:
2334
        /* STG */
2335
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
2336
        break;
2337
    case 0x26:
2338
        /* STS */
2339
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
2340
        break;
2341
    case 0x27:
2342
        /* STT */
2343
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
2344
        break;
2345
    case 0x28:
2346
        /* LDL */
2347
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2348
        break;
2349
    case 0x29:
2350
        /* LDQ */
2351
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2352
        break;
2353
    case 0x2A:
2354
        /* LDL_L */
2355
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2356
        break;
2357
    case 0x2B:
2358
        /* LDQ_L */
2359
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2360
        break;
2361
    case 0x2C:
2362
        /* STL */
2363
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
2364
        break;
2365
    case 0x2D:
2366
        /* STQ */
2367
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
2368
        break;
2369
    case 0x2E:
2370
        /* STL_C */
2371
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
2372
        break;
2373
    case 0x2F:
2374
        /* STQ_C */
2375
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
2376
        break;
2377
    case 0x30:
2378
        /* BR */
2379
        if (ra != 31)
2380
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2381
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2382
        ret = 1;
2383
        break;
2384
    case 0x31: /* FBEQ */
2385
    case 0x32: /* FBLT */
2386
    case 0x33: /* FBLE */
2387
        gen_fbcond(ctx, opc, ra, disp21);
2388
        ret = 1;
2389
        break;
2390
    case 0x34:
2391
        /* BSR */
2392
        if (ra != 31)
2393
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2394
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2395
        ret = 1;
2396
        break;
2397
    case 0x35: /* FBNE */
2398
    case 0x36: /* FBGE */
2399
    case 0x37: /* FBGT */
2400
        gen_fbcond(ctx, opc, ra, disp21);
2401
        ret = 1;
2402
        break;
2403
    case 0x38:
2404
        /* BLBC */
2405
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2406
        ret = 1;
2407
        break;
2408
    case 0x39:
2409
        /* BEQ */
2410
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2411
        ret = 1;
2412
        break;
2413
    case 0x3A:
2414
        /* BLT */
2415
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2416
        ret = 1;
2417
        break;
2418
    case 0x3B:
2419
        /* BLE */
2420
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2421
        ret = 1;
2422
        break;
2423
    case 0x3C:
2424
        /* BLBS */
2425
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2426
        ret = 1;
2427
        break;
2428
    case 0x3D:
2429
        /* BNE */
2430
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2431
        ret = 1;
2432
        break;
2433
    case 0x3E:
2434
        /* BGE */
2435
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2436
        ret = 1;
2437
        break;
2438
    case 0x3F:
2439
        /* BGT */
2440
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2441
        ret = 1;
2442
        break;
2443
    invalid_opc:
2444
        gen_invalid(ctx);
2445
        ret = 3;
2446
        break;
2447
    }
2448

    
2449
    return ret;
2450
}
2451

    
2452
/* Translate a block of guest Alpha code starting at tb->pc into TCG ops.
 *
 * Translation stops when translate_one() reports a control-flow change or
 * exception, at a page boundary, when the opcode buffer or icount budget is
 * exhausted, or when single-stepping.
 *
 * search_pc != 0 selects the "restore" mode used when a host fault must be
 * mapped back to a guest PC: per-op guest-PC/icount bookkeeping is recorded
 * in the gen_opc_* arrays instead of producing a TB for direct execution.
 */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;             /* lj: last gen_opc_* slot written (search_pc) */
    int ret;                    /* translate_one() status; 0 = keep going */
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    /* NOTE(review): mem_idx presumably derives the MMU index from PS bits
       <4:3>; pal_mode from the low bit of EXC_ADDR — confirm against cpu.h. */
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        /* Emit a debug exception if a breakpoint is set on this PC.  The
           inner 'break' only leaves the breakpoint scan; translation of the
           instruction still proceeds below. */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record guest PC and icount for every TCG op slot; pad any
               slots emitted since the previous instruction with zeros. */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        insn = ldl_code(ctx.pc);        /* fetch the 32-bit instruction word */
        num_insns++;
        ctx.pc += 4;                    /* advance before translating: ctx.pc
                                           is the "next" PC used for link
                                           registers and branch targets */
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    /* ret == 1 (branches) already wrote cpu_pc; ret == 3 (invalid opcode)
       raised an exception.  For every other exit, store the fall-through PC. */
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill bookkeeping for any trailing ops after the last insn. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
2559

    
2560
/* Translate the TB at tb->pc for execution (no restore bookkeeping). */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
2564

    
2565
/* Re-translate the TB recording per-op guest PCs, so a host fault inside
   the TB can be mapped back to the precise guest PC. */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
2569

    
2570
/* One entry of the CPU model table consulted by cpu_alpha_init(). */
struct cpu_def_t {
    const char *name;           /* model string matched against -cpu option */
    int implver, amask;         /* IMPLVER code and AMASK feature-bit mask */
};
2574

    
2575
/* Table of recognised CPU models, searched linearly by name in
   cpu_alpha_init().  Note the generation names and the bare chip
   numbers (e.g. "ev56" and "21164a") carry identical settings.  */
static const struct cpu_def_t cpu_defs[] = {
    { "ev4",   IMPLVER_2106x, 0 },
    { "ev5",   IMPLVER_21164, 0 },
    { "ev56",  IMPLVER_21164, AMASK_BWX },
    { "pca56", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "ev6",   IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "ev67",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "ev68",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "21064", IMPLVER_2106x, 0 },
    { "21164", IMPLVER_21164, 0 },
    { "21164a", IMPLVER_21164, AMASK_BWX },
    { "21164pc", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "21264", IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "21264a", IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                                | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), }
};
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2595
{
2596
    CPUAlphaState *env;
2597
    uint64_t hwpcb;
2598
    int implver, amask, i, max;
2599

    
2600
    env = qemu_mallocz(sizeof(CPUAlphaState));
2601
    cpu_exec_init(env);
2602
    alpha_translate_init();
2603
    tlb_flush(env, 1);
2604

    
2605
    /* Default to ev67; no reason not to emulate insns by default.  */
2606
    implver = IMPLVER_21264;
2607
    amask = (AMASK_BWX | AMASK_FIX | AMASK_CIX | AMASK_MVI
2608
             | AMASK_TRAP | AMASK_PREFETCH);
2609

    
2610
    max = ARRAY_SIZE(cpu_defs);
2611
    for (i = 0; i < max; i++) {
2612
        if (strcmp (cpu_model, cpu_defs[i].name) == 0) {
2613
            implver = cpu_defs[i].implver;
2614
            amask = cpu_defs[i].amask;
2615
            break;
2616
        }
2617
    }
2618
    env->implver = implver;
2619
    env->amask = amask;
2620

    
2621
    env->ps = 0x1F00;
2622
#if defined (CONFIG_USER_ONLY)
2623
    env->ps |= 1 << 3;
2624
#endif
2625
    pal_init(env);
2626
    /* Initialize IPR */
2627
    hwpcb = env->ipr[IPR_PCBB];
2628
    env->ipr[IPR_ASN] = 0;
2629
    env->ipr[IPR_ASTEN] = 0;
2630
    env->ipr[IPR_ASTSR] = 0;
2631
    env->ipr[IPR_DATFX] = 0;
2632
    /* XXX: fix this */
2633
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2634
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2635
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2636
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2637
    env->ipr[IPR_FEN] = 0;
2638
    env->ipr[IPR_IPL] = 31;
2639
    env->ipr[IPR_MCES] = 0;
2640
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2641
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2642
    env->ipr[IPR_SISR] = 0;
2643
    env->ipr[IPR_VIRBND] = -1ULL;
2644

    
2645
    qemu_init_vcpu(env);
2646
    return env;
2647
}
/* Restore env->pc from the gen_opc_pc side table for the TCG op at
   index pc_pos (tb, searched_pc and puc are unused on Alpha).  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}