Statistics
| Branch: | Revision:

root / target-alpha / translate.c @ d9a50a36

History | View | Annotate | Download (82.9 kB)

1
/*
2
 *  Alpha emulation cpu translation for qemu.
3
 *
4
 *  Copyright (c) 2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18
 */
19

    
20
#include <stdint.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23

    
24
#include "cpu.h"
25
#include "exec-all.h"
26
#include "disas.h"
27
#include "host-utils.h"
28
#include "tcg-op.h"
29
#include "qemu-common.h"
30

    
31
#include "helper.h"
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
#undef ALPHA_DEBUG_DISAS
36

    
37
#ifdef ALPHA_DEBUG_DISAS
38
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
39
#else
40
#  define LOG_DISAS(...) do { } while (0)
41
#endif
42

    
43
typedef struct DisasContext DisasContext;
/* Per-translation-block state threaded through the instruction decoder.  */
struct DisasContext {
    uint64_t pc;          /* virtual PC of the insn being translated */
    int mem_idx;          /* MMU index used for generated memory accesses */
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;         /* non-zero while translating PALcode */
#endif
    CPUAlphaState *env;
    uint32_t amask;       /* implemented architecture extensions (AMASK bits) */
};
53

    
54
/* global register indexes */
55
static TCGv_ptr cpu_env;
56
static TCGv cpu_ir[31];
57
static TCGv cpu_fir[31];
58
static TCGv cpu_pc;
59
static TCGv cpu_lock;
60
#ifdef CONFIG_USER_ONLY
61
static TCGv cpu_uniq;
62
#endif
63

    
64
/* register names */
65
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
66

    
67
#include "gen-icount.h"
68

    
69
/* One-time allocation of the TCG globals mirroring CPU state: the integer
   and FP register files, pc, the load-locked address, and (user mode only)
   the process-unique value.  Idempotent via done_init.  */
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    /* Register names are carved out of one static buffer; TCG keeps a
       pointer to the name, so the storage must outlive the globals.  */
    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;  /* strlen("irN") + NUL */

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;  /* strlen("firN") + NUL */
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

#ifdef CONFIG_USER_ONLY
    cpu_uniq = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, unique), "uniq");
#endif

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
110

    
111
/* Emit a call to the exception helper with the current PC made visible
   in cpu_pc first, so the helper sees a precise exception address.  */
static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 excp_v, err_v;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    excp_v = tcg_const_i32(exception);
    err_v = tcg_const_i32(error_code);
    gen_helper_excp(excp_v, err_v);
    tcg_temp_free_i32(err_v);
    tcg_temp_free_i32(excp_v);
}
122

    
123
/* Raise an OPCDEC (reserved/illegal opcode) exception.  */
static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
127

    
128
/* LDF: load a 32-bit VAX F-float from memory and expand it to the
   64-bit register representation via the memory_to_f helper.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv raw64 = tcg_temp_new();
    TCGv_i32 raw32 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32u(raw64, t1, flags);
    tcg_gen_trunc_i64_i32(raw32, raw64);
    gen_helper_memory_to_f(t0, raw32);

    tcg_temp_free_i32(raw32);
    tcg_temp_free(raw64);
}
138

    
139
/* LDG: load a 64-bit VAX G-float and convert its memory byte layout
   to register format via the memory_to_g helper.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv raw = tcg_temp_new();

    tcg_gen_qemu_ld64(raw, t1, flags);
    gen_helper_memory_to_g(t0, raw);

    tcg_temp_free(raw);
}
146

    
147
/* LDS: load a 32-bit IEEE single and expand it to the 64-bit register
   representation via the memory_to_s helper.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv raw64 = tcg_temp_new();
    TCGv_i32 raw32 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32u(raw64, t1, flags);
    tcg_gen_trunc_i64_i32(raw32, raw64);
    gen_helper_memory_to_s(t0, raw32);

    tcg_temp_free_i32(raw32);
    tcg_temp_free(raw64);
}
157

    
158
/* LDL_L: load-locked 32-bit.  Record the locked address in cpu_lock
   (checked later by STx_C), then do a sign-extending 32-bit load.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}
163

    
164
/* LDQ_L: load-locked 64-bit.  Record the locked address in cpu_lock
   (checked later by STx_C), then perform the load.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
169

    
170
static inline void gen_load_mem(DisasContext *ctx,
171
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
172
                                                          int flags),
173
                                int ra, int rb, int32_t disp16, int fp,
174
                                int clear)
175
{
176
    TCGv addr;
177

    
178
    if (unlikely(ra == 31))
179
        return;
180

    
181
    addr = tcg_temp_new();
182
    if (rb != 31) {
183
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
184
        if (clear)
185
            tcg_gen_andi_i64(addr, addr, ~0x7);
186
    } else {
187
        if (clear)
188
            disp16 &= ~0x7;
189
        tcg_gen_movi_i64(addr, disp16);
190
    }
191
    if (fp)
192
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
193
    else
194
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
195
    tcg_temp_free(addr);
196
}
197

    
198
/* STF: convert the register representation back to the 32-bit VAX
   F-float memory layout and store it.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 raw32 = tcg_temp_new_i32();
    TCGv raw64 = tcg_temp_new();

    gen_helper_f_to_memory(raw32, t0);
    tcg_gen_extu_i32_i64(raw64, raw32);
    tcg_gen_qemu_st32(raw64, t1, flags);

    tcg_temp_free(raw64);
    tcg_temp_free_i32(raw32);
}
208

    
209
/* STG: convert the register representation back to the 64-bit VAX
   G-float memory layout and store it.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv raw = tcg_temp_new();

    gen_helper_g_to_memory(raw, t0);
    tcg_gen_qemu_st64(raw, t1, flags);

    tcg_temp_free(raw);
}
216

    
217
/* STS: convert the register representation back to a 32-bit IEEE
   single and store it.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 raw32 = tcg_temp_new_i32();
    TCGv raw64 = tcg_temp_new();

    gen_helper_s_to_memory(raw32, t0);
    tcg_gen_extu_i32_i64(raw64, raw32);
    tcg_gen_qemu_st32(raw64, t1, flags);

    tcg_temp_free(raw64);
    tcg_temp_free_i32(raw32);
}
227

    
228
/* STL_C: store-conditional 32-bit.  The store happens (and t0 is set
   to 1) only if the address still equals cpu_lock from the preceding
   load-locked; otherwise t0 is set to 0.  The lock is invalidated in
   either case.  NOTE(review): address comparison only approximates
   real lock-flag semantics -- confirm acceptable for this target.  */
static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    /* Skip the store when the lock address does not match.  */
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);            /* success */
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);            /* failure */
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);     /* lock consumed either way */
}
243

    
244
/* STQ_C: store-conditional 64-bit.  Same protocol as gen_qemu_stl_c:
   store and set t0 = 1 only if the address matches cpu_lock, else set
   t0 = 0; the lock is invalidated in either case.  */
static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    /* Skip the store when the lock address does not match.  */
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);            /* success */
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);            /* failure */
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);     /* lock consumed either way */
}
259

    
260
static inline void gen_store_mem(DisasContext *ctx,
261
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
262
                                                            int flags),
263
                                 int ra, int rb, int32_t disp16, int fp,
264
                                 int clear, int local)
265
{
266
    TCGv addr;
267
    if (local)
268
        addr = tcg_temp_local_new();
269
    else
270
        addr = tcg_temp_new();
271
    if (rb != 31) {
272
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
273
        if (clear)
274
            tcg_gen_andi_i64(addr, addr, ~0x7);
275
    } else {
276
        if (clear)
277
            disp16 &= ~0x7;
278
        tcg_gen_movi_i64(addr, disp16);
279
    }
280
    if (ra != 31) {
281
        if (fp)
282
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
283
        else
284
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
285
    } else {
286
        TCGv zero;
287
        if (local)
288
            zero = tcg_const_local_i64(0);
289
        else
290
            zero = tcg_const_i64(0);
291
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
292
        tcg_temp_free(zero);
293
    }
294
    tcg_temp_free(addr);
295
}
296

    
297
/* Integer conditional branch.  Tests cpu_ir[ra] (or only its low bit
   when mask is set, for BLBC/BLBS) against zero with 'cond', then sets
   cpu_pc to either ctx->pc (assumed to already point past this insn --
   TODO confirm against the caller) or ctx->pc + 4*disp.  */
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);                         /* not taken */
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));  /* taken */
    gen_set_label(l2);
}
324

    
325
/* Floating-point conditional branch (FBEQ..FBGT): evaluate the FP
   condition on F(ra) via the matching cmpf helper (F31 reads as zero),
   then set cpu_pc to the fall-through PC or to ctx->pc + 4*disp.  */
static inline void gen_fbcond(DisasContext *ctx, int opc, int ra, int32_t disp)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else  {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    /* Fix: release the condition temporary; it was previously leaked.  */
    tcg_temp_free(tmp);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);                         /* not taken */
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));  /* taken */
    gen_set_label(l2);
}
369

    
370
/* CMOVcc / CMOVLBx: conditionally move rb (or the literal) into rc.
   inv_cond is the INVERTED condition: the branch to l1 skips the move
   when the original condition fails.  mask selects testing only the
   low bit of ra (CMOVLBC/CMOVLBS).  */
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
401

    
402
/* Generate a one-source FP helper call: F(rc) = helper_<name>(F(rb)).
   F31 as a source reads as zero; writes to F31 are discarded.  */
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
      return;                                               \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);    \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);            \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)
433

    
434
/* Generate a two-source FP helper call:
   F(rc) = helper_<name>(F(ra), F(rb)).  F31 as a source reads as
   zero; writes to F31 are discarded.  */
#define FARITH3(name)                                                     \
static inline void glue(gen_f, name)(int ra, int rb, int rc)              \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                   \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}

FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)
484

    
485
#define FCMOV(name)                                                   \
486
static inline void glue(gen_f, name)(int ra, int rb, int rc)          \
487
{                                                                     \
488
    int l1;                                                           \
489
    TCGv tmp;                                                         \
490
                                                                      \
491
    if (unlikely(rc == 31))                                           \
492
        return;                                                       \
493
                                                                      \
494
    l1 = gen_new_label();                                             \
495
    tmp = tcg_temp_new();                                 \
496
    if (ra != 31) {                                                   \
497
        tmp = tcg_temp_new();                             \
498
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
499
    } else  {                                                         \
500
        tmp = tcg_const_i64(0);                                       \
501
        gen_helper_ ## name (tmp, tmp);                               \
502
    }                                                                 \
503
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
504
    if (rb != 31)                                                     \
505
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);                    \
506
    else                                                              \
507
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
508
    gen_set_label(l1);                                                \
509
}
510
FCMOV(cmpfeq)
511
FCMOV(cmpfne)
512
FCMOV(cmpflt)
513
FCMOV(cmpfge)
514
FCMOV(cmpfle)
515
FCMOV(cmpfgt)
516

    
517
/* Expand an 8-bit ZAPNOT byte-select literal into the corresponding
   64-bit AND mask: bit i of lit keeps byte i of the operand.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    /* Build the mask top byte first, shifting as we go.  */
    for (i = 7; i >= 0; i--) {
        mask <<= 8;
        if (lit & (1 << i)) {
            mask |= 0xff;
        }
    }
    return mask;
}
528

    
529
/* Implement zapnot with an immediate operand, which expands to some
530
   form of immediate AND.  This is a basic building block in the
531
   definition of many of the other byte manipulation instructions.  */
532
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
533
{
534
    switch (lit) {
535
    case 0x00:
536
        tcg_gen_movi_i64(dest, 0);
537
        break;
538
    case 0x01:
539
        tcg_gen_ext8u_i64(dest, src);
540
        break;
541
    case 0x03:
542
        tcg_gen_ext16u_i64(dest, src);
543
        break;
544
    case 0x0f:
545
        tcg_gen_ext32u_i64(dest, src);
546
        break;
547
    case 0xff:
548
        tcg_gen_mov_i64(dest, src);
549
        break;
550
    default:
551
        tcg_gen_andi_i64 (dest, src, zapnot_mask (lit));
552
        break;
553
    }
554
}
555

    
556
/* ZAPNOT: keep the bytes of ra selected by rb (or the literal).  */
static inline void gen_zapnot(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(ra == 31)) {
        /* R31 source reads as zero.  */
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    } else if (islit) {
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], lit);
    } else {
        gen_helper_zapnot(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }
}
567

    
568
/* ZAP: clear the bytes of ra selected by rb (or the literal) --
   the complement of ZAPNOT.  */
static inline void gen_zap(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(ra == 31)) {
        /* R31 source reads as zero.  */
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    } else if (islit) {
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], ~lit);
    } else {
        gen_helper_zap(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }
}
579

    
580

    
581
/* EXTWH, EXTLH, EXTQH: extract a byte_mask-wide field ending at the
   top of the quadword -- shift ra left by 64 - 8*(shift&7), then zap
   to the field width.  */
static void gen_ext_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            /* (64 - 8*(lit&7)) mod 64: a zero byte offset shifts by 0.  */
            lit = (64 - (lit & 7) * 8) & 0x3f;
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        } else {
            /* Same computation on the dynamic count: -(8*(rb&7)) & 63.  */
            TCGv tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
605

    
606
/* EXTBL, EXTWL, EXTLL, EXTQL */
607
static void gen_ext_l(int ra, int rb, int rc, int islit,
608
                      uint8_t lit, uint8_t byte_mask)
609
{
610
    if (unlikely(rc == 31))
611
        return;
612
    else if (unlikely(ra == 31))
613
        tcg_gen_movi_i64(cpu_ir[rc], 0);
614
    else {
615
        if (islit) {
616
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
617
        } else {
618
            TCGv tmp = tcg_temp_new();
619
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
620
            tcg_gen_shli_i64(tmp, tmp, 3);
621
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
622
            tcg_temp_free(tmp);
623
        }
624
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
625
    }
626
}
627

    
628
/* INSWH, INSLH, INSQH: position the byte_mask-selected bytes of ra so
   the part that crosses into the next quadword lands in rc.  A byte
   offset of zero yields zero.  */
static void gen_ins_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31) || (islit && (lit & 7) == 0))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            /* Note that we have handled the lit==0 case above.  */
            tcg_gen_shri_i64 (cpu_ir[rc], tmp, 64 - (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();

            /* If (B & 7) == 0, we need to shift by 64 and leave a zero.
               Do this portably by splitting the shift into two parts:
               shift_count-1 and 1.  Arrange for the -1 by using
               ones-complement instead of twos-complement in the negation:
               ~((B & 7) * 8) & 63.  */

            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_not_i64(shift, shift);
            tcg_gen_andi_i64(shift, shift, 0x3f);

            tcg_gen_shr_i64(cpu_ir[rc], tmp, shift);
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[rc], 1);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
669

    
670
/* INSBL, INSWL, INSLL, INSQL: shift the byte_mask-selected bytes of ra
   left by 8*(shift&7) into position within the quadword.  */
static void gen_ins_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           the same number of byte slots as the data and apply the zap
           at the end.  This is equivalent to simply performing the zap
           first and shifting afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            tcg_gen_shli_i64(cpu_ir[rc], tmp, (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();
            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_shl_i64(cpu_ir[rc], tmp, shift);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
699

    
700
/* MSKWH, MSKLH, MSKQH: clear the bytes of ra covered by the part of
   the shifted byte mask that crosses into the next quadword.  */
static void gen_msk_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~((B & 7) * 8) & 63.  */

        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
737

    
738
/* MSKBL, MSKWL, MSKLL, MSKQL: clear the bytes of ra covered by the
   byte mask shifted left by the byte offset within the quadword.  */
static void gen_msk_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* Expand the byte mask to bytes, shift it into place, and
           clear those bytes of ra with an and-complement.  */
        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
763

    
764
/* Code to call arith3 helpers: rc = helper_<name>(ra, rb-or-literal),
   with R31 sources reading as zero and writes to R31 discarded.  */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
ARITH3(minub8)
ARITH3(minsb8)
ARITH3(minuw4)
ARITH3(minsw4)
ARITH3(maxub8)
ARITH3(maxsb8)
ARITH3(maxuw4)
ARITH3(maxsw4)
ARITH3(perr)
807

    
808
#define MVIOP2(name)                                    \
809
static inline void glue(gen_, name)(int rb, int rc)     \
810
{                                                       \
811
    if (unlikely(rc == 31))                             \
812
        return;                                         \
813
    if (unlikely(rb == 31))                             \
814
        tcg_gen_movi_i64(cpu_ir[rc], 0);                \
815
    else                                                \
816
        gen_helper_ ## name (cpu_ir[rc], cpu_ir[rb]);   \
817
}
818
MVIOP2(pklb)
819
MVIOP2(pkwb)
820
MVIOP2(unpkbl)
821
MVIOP2(unpkbw)
822

    
823
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
824
                           uint8_t lit)
825
{
826
    int l1, l2;
827
    TCGv tmp;
828

    
829
    if (unlikely(rc == 31))
830
        return;
831

    
832
    l1 = gen_new_label();
833
    l2 = gen_new_label();
834

    
835
    if (ra != 31) {
836
        tmp = tcg_temp_new();
837
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
838
    } else
839
        tmp = tcg_const_i64(0);
840
    if (islit)
841
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
842
    else
843
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
844

    
845
    tcg_gen_movi_i64(cpu_ir[rc], 0);
846
    tcg_gen_br(l2);
847
    gen_set_label(l1);
848
    tcg_gen_movi_i64(cpu_ir[rc], 1);
849
    gen_set_label(l2);
850
}
851

    
852
static inline int translate_one(DisasContext *ctx, uint32_t insn)
853
{
854
    uint32_t palcode;
855
    int32_t disp21, disp16, disp12;
856
    uint16_t fn11, fn16;
857
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit, real_islit;
858
    uint8_t lit;
859
    int ret;
860

    
861
    /* Decode all instruction fields */
862
    opc = insn >> 26;
863
    ra = (insn >> 21) & 0x1F;
864
    rb = (insn >> 16) & 0x1F;
865
    rc = insn & 0x1F;
866
    sbz = (insn >> 13) & 0x07;
867
    real_islit = islit = (insn >> 12) & 1;
868
    if (rb == 31 && !islit) {
869
        islit = 1;
870
        lit = 0;
871
    } else
872
        lit = (insn >> 13) & 0xFF;
873
    palcode = insn & 0x03FFFFFF;
874
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
875
    disp16 = (int16_t)(insn & 0x0000FFFF);
876
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
877
    fn16 = insn & 0x0000FFFF;
878
    fn11 = (insn >> 5) & 0x000007FF;
879
    fpfn = fn11 & 0x3F;
880
    fn7 = (insn >> 5) & 0x0000007F;
881
    fn2 = (insn >> 5) & 0x00000003;
882
    ret = 0;
883
    LOG_DISAS("opc %02x ra %2d rb %2d rc %2d disp16 %6d\n",
884
              opc, ra, rb, rc, disp16);
885

    
886
    switch (opc) {
887
    case 0x00:
888
        /* CALL_PAL */
889
#ifdef CONFIG_USER_ONLY
890
        if (palcode == 0x9E) {
891
            /* RDUNIQUE */
892
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_uniq);
893
            break;
894
        } else if (palcode == 0x9F) {
895
            /* WRUNIQUE */
896
            tcg_gen_mov_i64(cpu_uniq, cpu_ir[IR_A0]);
897
            break;
898
        }
899
#endif
900
        if (palcode >= 0x80 && palcode < 0xC0) {
901
            /* Unprivileged PAL call */
902
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
903
            ret = 3;
904
            break;
905
        }
906
#ifndef CONFIG_USER_ONLY
907
        if (palcode < 0x40) {
908
            /* Privileged PAL code */
909
            if (ctx->mem_idx & 1)
910
                goto invalid_opc;
911
            gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
912
            ret = 3;
913
        }
914
#endif
915
        /* Invalid PAL call */
916
        goto invalid_opc;
917
    case 0x01:
918
        /* OPC01 */
919
        goto invalid_opc;
920
    case 0x02:
921
        /* OPC02 */
922
        goto invalid_opc;
923
    case 0x03:
924
        /* OPC03 */
925
        goto invalid_opc;
926
    case 0x04:
927
        /* OPC04 */
928
        goto invalid_opc;
929
    case 0x05:
930
        /* OPC05 */
931
        goto invalid_opc;
932
    case 0x06:
933
        /* OPC06 */
934
        goto invalid_opc;
935
    case 0x07:
936
        /* OPC07 */
937
        goto invalid_opc;
938
    case 0x08:
939
        /* LDA */
940
        if (likely(ra != 31)) {
941
            if (rb != 31)
942
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
943
            else
944
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
945
        }
946
        break;
947
    case 0x09:
948
        /* LDAH */
949
        if (likely(ra != 31)) {
950
            if (rb != 31)
951
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
952
            else
953
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
954
        }
955
        break;
956
    case 0x0A:
957
        /* LDBU */
958
        if (!(ctx->amask & AMASK_BWX))
959
            goto invalid_opc;
960
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
961
        break;
962
    case 0x0B:
963
        /* LDQ_U */
964
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
965
        break;
966
    case 0x0C:
967
        /* LDWU */
968
        if (!(ctx->amask & AMASK_BWX))
969
            goto invalid_opc;
970
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
971
        break;
972
    case 0x0D:
973
        /* STW */
974
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
975
        break;
976
    case 0x0E:
977
        /* STB */
978
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
979
        break;
980
    case 0x0F:
981
        /* STQ_U */
982
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
983
        break;
984
    case 0x10:
985
        switch (fn7) {
986
        case 0x00:
987
            /* ADDL */
988
            if (likely(rc != 31)) {
989
                if (ra != 31) {
990
                    if (islit) {
991
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
992
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
993
                    } else {
994
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
995
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
996
                    }
997
                } else {
998
                    if (islit)
999
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1000
                    else
1001
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1002
                }
1003
            }
1004
            break;
1005
        case 0x02:
1006
            /* S4ADDL */
1007
            if (likely(rc != 31)) {
1008
                if (ra != 31) {
1009
                    TCGv tmp = tcg_temp_new();
1010
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1011
                    if (islit)
1012
                        tcg_gen_addi_i64(tmp, tmp, lit);
1013
                    else
1014
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
1015
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1016
                    tcg_temp_free(tmp);
1017
                } else {
1018
                    if (islit)
1019
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1020
                    else
1021
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1022
                }
1023
            }
1024
            break;
1025
        case 0x09:
1026
            /* SUBL */
1027
            if (likely(rc != 31)) {
1028
                if (ra != 31) {
1029
                    if (islit)
1030
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1031
                    else
1032
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1033
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1034
                } else {
1035
                    if (islit)
1036
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1037
                    else {
1038
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1039
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1040
                }
1041
            }
1042
            break;
1043
        case 0x0B:
1044
            /* S4SUBL */
1045
            if (likely(rc != 31)) {
1046
                if (ra != 31) {
1047
                    TCGv tmp = tcg_temp_new();
1048
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1049
                    if (islit)
1050
                        tcg_gen_subi_i64(tmp, tmp, lit);
1051
                    else
1052
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
1053
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1054
                    tcg_temp_free(tmp);
1055
                } else {
1056
                    if (islit)
1057
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1058
                    else {
1059
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1060
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1061
                    }
1062
                }
1063
            }
1064
            break;
1065
        case 0x0F:
1066
            /* CMPBGE */
1067
            gen_cmpbge(ra, rb, rc, islit, lit);
1068
            break;
1069
        case 0x12:
1070
            /* S8ADDL */
1071
            if (likely(rc != 31)) {
1072
                if (ra != 31) {
1073
                    TCGv tmp = tcg_temp_new();
1074
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1075
                    if (islit)
1076
                        tcg_gen_addi_i64(tmp, tmp, lit);
1077
                    else
1078
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
1079
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1080
                    tcg_temp_free(tmp);
1081
                } else {
1082
                    if (islit)
1083
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1084
                    else
1085
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1086
                }
1087
            }
1088
            break;
1089
        case 0x1B:
1090
            /* S8SUBL */
1091
            if (likely(rc != 31)) {
1092
                if (ra != 31) {
1093
                    TCGv tmp = tcg_temp_new();
1094
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1095
                    if (islit)
1096
                        tcg_gen_subi_i64(tmp, tmp, lit);
1097
                    else
1098
                       tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
1099
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1100
                    tcg_temp_free(tmp);
1101
                } else {
1102
                    if (islit)
1103
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1104
                    else
1105
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1106
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1107
                    }
1108
                }
1109
            }
1110
            break;
1111
        case 0x1D:
1112
            /* CMPULT */
1113
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
1114
            break;
1115
        case 0x20:
1116
            /* ADDQ */
1117
            if (likely(rc != 31)) {
1118
                if (ra != 31) {
1119
                    if (islit)
1120
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1121
                    else
1122
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1123
                } else {
1124
                    if (islit)
1125
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1126
                    else
1127
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1128
                }
1129
            }
1130
            break;
1131
        case 0x22:
1132
            /* S4ADDQ */
1133
            if (likely(rc != 31)) {
1134
                if (ra != 31) {
1135
                    TCGv tmp = tcg_temp_new();
1136
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1137
                    if (islit)
1138
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1139
                    else
1140
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1141
                    tcg_temp_free(tmp);
1142
                } else {
1143
                    if (islit)
1144
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1145
                    else
1146
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1147
                }
1148
            }
1149
            break;
1150
        case 0x29:
1151
            /* SUBQ */
1152
            if (likely(rc != 31)) {
1153
                if (ra != 31) {
1154
                    if (islit)
1155
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1156
                    else
1157
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1158
                } else {
1159
                    if (islit)
1160
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1161
                    else
1162
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1163
                }
1164
            }
1165
            break;
1166
        case 0x2B:
1167
            /* S4SUBQ */
1168
            if (likely(rc != 31)) {
1169
                if (ra != 31) {
1170
                    TCGv tmp = tcg_temp_new();
1171
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1172
                    if (islit)
1173
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1174
                    else
1175
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1176
                    tcg_temp_free(tmp);
1177
                } else {
1178
                    if (islit)
1179
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1180
                    else
1181
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1182
                }
1183
            }
1184
            break;
1185
        case 0x2D:
1186
            /* CMPEQ */
1187
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
1188
            break;
1189
        case 0x32:
1190
            /* S8ADDQ */
1191
            if (likely(rc != 31)) {
1192
                if (ra != 31) {
1193
                    TCGv tmp = tcg_temp_new();
1194
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1195
                    if (islit)
1196
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1197
                    else
1198
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1199
                    tcg_temp_free(tmp);
1200
                } else {
1201
                    if (islit)
1202
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1203
                    else
1204
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1205
                }
1206
            }
1207
            break;
1208
        case 0x3B:
1209
            /* S8SUBQ */
1210
            if (likely(rc != 31)) {
1211
                if (ra != 31) {
1212
                    TCGv tmp = tcg_temp_new();
1213
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1214
                    if (islit)
1215
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1216
                    else
1217
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1218
                    tcg_temp_free(tmp);
1219
                } else {
1220
                    if (islit)
1221
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1222
                    else
1223
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1224
                }
1225
            }
1226
            break;
1227
        case 0x3D:
1228
            /* CMPULE */
1229
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1230
            break;
1231
        case 0x40:
1232
            /* ADDL/V */
1233
            gen_addlv(ra, rb, rc, islit, lit);
1234
            break;
1235
        case 0x49:
1236
            /* SUBL/V */
1237
            gen_sublv(ra, rb, rc, islit, lit);
1238
            break;
1239
        case 0x4D:
1240
            /* CMPLT */
1241
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1242
            break;
1243
        case 0x60:
1244
            /* ADDQ/V */
1245
            gen_addqv(ra, rb, rc, islit, lit);
1246
            break;
1247
        case 0x69:
1248
            /* SUBQ/V */
1249
            gen_subqv(ra, rb, rc, islit, lit);
1250
            break;
1251
        case 0x6D:
1252
            /* CMPLE */
1253
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1254
            break;
1255
        default:
1256
            goto invalid_opc;
1257
        }
1258
        break;
1259
    case 0x11:
1260
        switch (fn7) {
1261
        case 0x00:
1262
            /* AND */
1263
            if (likely(rc != 31)) {
1264
                if (ra == 31)
1265
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1266
                else if (islit)
1267
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1268
                else
1269
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1270
            }
1271
            break;
1272
        case 0x08:
1273
            /* BIC */
1274
            if (likely(rc != 31)) {
1275
                if (ra != 31) {
1276
                    if (islit)
1277
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1278
                    else
1279
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1280
                } else
1281
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1282
            }
1283
            break;
1284
        case 0x14:
1285
            /* CMOVLBS */
1286
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1287
            break;
1288
        case 0x16:
1289
            /* CMOVLBC */
1290
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1291
            break;
1292
        case 0x20:
1293
            /* BIS */
1294
            if (likely(rc != 31)) {
1295
                if (ra != 31) {
1296
                    if (islit)
1297
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1298
                    else
1299
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1300
                } else {
1301
                    if (islit)
1302
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1303
                    else
1304
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1305
                }
1306
            }
1307
            break;
1308
        case 0x24:
1309
            /* CMOVEQ */
1310
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1311
            break;
1312
        case 0x26:
1313
            /* CMOVNE */
1314
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1315
            break;
1316
        case 0x28:
1317
            /* ORNOT */
1318
            if (likely(rc != 31)) {
1319
                if (ra != 31) {
1320
                    if (islit)
1321
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1322
                    else
1323
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1324
                } else {
1325
                    if (islit)
1326
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1327
                    else
1328
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1329
                }
1330
            }
1331
            break;
1332
        case 0x40:
1333
            /* XOR */
1334
            if (likely(rc != 31)) {
1335
                if (ra != 31) {
1336
                    if (islit)
1337
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1338
                    else
1339
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1340
                } else {
1341
                    if (islit)
1342
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1343
                    else
1344
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1345
                }
1346
            }
1347
            break;
1348
        case 0x44:
1349
            /* CMOVLT */
1350
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1351
            break;
1352
        case 0x46:
1353
            /* CMOVGE */
1354
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1355
            break;
1356
        case 0x48:
1357
            /* EQV */
1358
            if (likely(rc != 31)) {
1359
                if (ra != 31) {
1360
                    if (islit)
1361
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1362
                    else
1363
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1364
                } else {
1365
                    if (islit)
1366
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1367
                    else
1368
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1369
                }
1370
            }
1371
            break;
1372
        case 0x61:
1373
            /* AMASK */
1374
            if (likely(rc != 31)) {
1375
                if (islit)
1376
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
1377
                else
1378
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1379
                switch (ctx->env->implver) {
1380
                case IMPLVER_2106x:
1381
                    /* EV4, EV45, LCA, LCA45 & EV5 */
1382
                    break;
1383
                case IMPLVER_21164:
1384
                case IMPLVER_21264:
1385
                case IMPLVER_21364:
1386
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
1387
                                     ~(uint64_t)ctx->amask);
1388
                    break;
1389
                }
1390
            }
1391
            break;
1392
        case 0x64:
1393
            /* CMOVLE */
1394
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1395
            break;
1396
        case 0x66:
1397
            /* CMOVGT */
1398
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1399
            break;
1400
        case 0x6C:
1401
            /* IMPLVER */
1402
            if (rc != 31)
1403
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
1404
            break;
1405
        default:
1406
            goto invalid_opc;
1407
        }
1408
        break;
1409
    case 0x12:
1410
        switch (fn7) {
1411
        case 0x02:
1412
            /* MSKBL */
1413
            gen_msk_l(ra, rb, rc, islit, lit, 0x01);
1414
            break;
1415
        case 0x06:
1416
            /* EXTBL */
1417
            gen_ext_l(ra, rb, rc, islit, lit, 0x01);
1418
            break;
1419
        case 0x0B:
1420
            /* INSBL */
1421
            gen_ins_l(ra, rb, rc, islit, lit, 0x01);
1422
            break;
1423
        case 0x12:
1424
            /* MSKWL */
1425
            gen_msk_l(ra, rb, rc, islit, lit, 0x03);
1426
            break;
1427
        case 0x16:
1428
            /* EXTWL */
1429
            gen_ext_l(ra, rb, rc, islit, lit, 0x03);
1430
            break;
1431
        case 0x1B:
1432
            /* INSWL */
1433
            gen_ins_l(ra, rb, rc, islit, lit, 0x03);
1434
            break;
1435
        case 0x22:
1436
            /* MSKLL */
1437
            gen_msk_l(ra, rb, rc, islit, lit, 0x0f);
1438
            break;
1439
        case 0x26:
1440
            /* EXTLL */
1441
            gen_ext_l(ra, rb, rc, islit, lit, 0x0f);
1442
            break;
1443
        case 0x2B:
1444
            /* INSLL */
1445
            gen_ins_l(ra, rb, rc, islit, lit, 0x0f);
1446
            break;
1447
        case 0x30:
1448
            /* ZAP */
1449
            gen_zap(ra, rb, rc, islit, lit);
1450
            break;
1451
        case 0x31:
1452
            /* ZAPNOT */
1453
            gen_zapnot(ra, rb, rc, islit, lit);
1454
            break;
1455
        case 0x32:
1456
            /* MSKQL */
1457
            gen_msk_l(ra, rb, rc, islit, lit, 0xff);
1458
            break;
1459
        case 0x34:
1460
            /* SRL */
1461
            if (likely(rc != 31)) {
1462
                if (ra != 31) {
1463
                    if (islit)
1464
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1465
                    else {
1466
                        TCGv shift = tcg_temp_new();
1467
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1468
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1469
                        tcg_temp_free(shift);
1470
                    }
1471
                } else
1472
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1473
            }
1474
            break;
1475
        case 0x36:
1476
            /* EXTQL */
1477
            gen_ext_l(ra, rb, rc, islit, lit, 0xff);
1478
            break;
1479
        case 0x39:
1480
            /* SLL */
1481
            if (likely(rc != 31)) {
1482
                if (ra != 31) {
1483
                    if (islit)
1484
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1485
                    else {
1486
                        TCGv shift = tcg_temp_new();
1487
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1488
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1489
                        tcg_temp_free(shift);
1490
                    }
1491
                } else
1492
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1493
            }
1494
            break;
1495
        case 0x3B:
1496
            /* INSQL */
1497
            gen_ins_l(ra, rb, rc, islit, lit, 0xff);
1498
            break;
1499
        case 0x3C:
1500
            /* SRA */
1501
            if (likely(rc != 31)) {
1502
                if (ra != 31) {
1503
                    if (islit)
1504
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1505
                    else {
1506
                        TCGv shift = tcg_temp_new();
1507
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1508
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1509
                        tcg_temp_free(shift);
1510
                    }
1511
                } else
1512
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1513
            }
1514
            break;
1515
        case 0x52:
1516
            /* MSKWH */
1517
            gen_msk_h(ra, rb, rc, islit, lit, 0x03);
1518
            break;
1519
        case 0x57:
1520
            /* INSWH */
1521
            gen_ins_h(ra, rb, rc, islit, lit, 0x03);
1522
            break;
1523
        case 0x5A:
1524
            /* EXTWH */
1525
            gen_ext_h(ra, rb, rc, islit, lit, 0x03);
1526
            break;
1527
        case 0x62:
1528
            /* MSKLH */
1529
            gen_msk_h(ra, rb, rc, islit, lit, 0x0f);
1530
            break;
1531
        case 0x67:
1532
            /* INSLH */
1533
            gen_ins_h(ra, rb, rc, islit, lit, 0x0f);
1534
            break;
1535
        case 0x6A:
1536
            /* EXTLH */
1537
            gen_ext_h(ra, rb, rc, islit, lit, 0x0f);
1538
            break;
1539
        case 0x72:
1540
            /* MSKQH */
1541
            gen_msk_h(ra, rb, rc, islit, lit, 0xff);
1542
            break;
1543
        case 0x77:
1544
            /* INSQH */
1545
            gen_ins_h(ra, rb, rc, islit, lit, 0xff);
1546
            break;
1547
        case 0x7A:
1548
            /* EXTQH */
1549
            gen_ext_h(ra, rb, rc, islit, lit, 0xff);
1550
            break;
1551
        default:
1552
            goto invalid_opc;
1553
        }
1554
        break;
1555
    case 0x13:
1556
        switch (fn7) {
1557
        case 0x00:
1558
            /* MULL */
1559
            if (likely(rc != 31)) {
1560
                if (ra == 31)
1561
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1562
                else {
1563
                    if (islit)
1564
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1565
                    else
1566
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1567
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1568
                }
1569
            }
1570
            break;
1571
        case 0x20:
1572
            /* MULQ */
1573
            if (likely(rc != 31)) {
1574
                if (ra == 31)
1575
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1576
                else if (islit)
1577
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1578
                else
1579
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1580
            }
1581
            break;
1582
        case 0x30:
1583
            /* UMULH */
1584
            gen_umulh(ra, rb, rc, islit, lit);
1585
            break;
1586
        case 0x40:
1587
            /* MULL/V */
1588
            gen_mullv(ra, rb, rc, islit, lit);
1589
            break;
1590
        case 0x60:
1591
            /* MULQ/V */
1592
            gen_mulqv(ra, rb, rc, islit, lit);
1593
            break;
1594
        default:
1595
            goto invalid_opc;
1596
        }
1597
        break;
1598
    case 0x14:
1599
        switch (fpfn) { /* f11 & 0x3F */
1600
        case 0x04:
1601
            /* ITOFS */
1602
            if (!(ctx->amask & AMASK_FIX))
1603
                goto invalid_opc;
1604
            if (likely(rc != 31)) {
1605
                if (ra != 31) {
1606
                    TCGv_i32 tmp = tcg_temp_new_i32();
1607
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1608
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1609
                    tcg_temp_free_i32(tmp);
1610
                } else
1611
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1612
            }
1613
            break;
1614
        case 0x0A:
1615
            /* SQRTF */
1616
            if (!(ctx->amask & AMASK_FIX))
1617
                goto invalid_opc;
1618
            gen_fsqrtf(rb, rc);
1619
            break;
1620
        case 0x0B:
1621
            /* SQRTS */
1622
            if (!(ctx->amask & AMASK_FIX))
1623
                goto invalid_opc;
1624
            gen_fsqrts(rb, rc);
1625
            break;
1626
        case 0x14:
1627
            /* ITOFF */
1628
            if (!(ctx->amask & AMASK_FIX))
1629
                goto invalid_opc;
1630
            if (likely(rc != 31)) {
1631
                if (ra != 31) {
1632
                    TCGv_i32 tmp = tcg_temp_new_i32();
1633
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1634
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1635
                    tcg_temp_free_i32(tmp);
1636
                } else
1637
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1638
            }
1639
            break;
1640
        case 0x24:
1641
            /* ITOFT */
1642
            if (!(ctx->amask & AMASK_FIX))
1643
                goto invalid_opc;
1644
            if (likely(rc != 31)) {
1645
                if (ra != 31)
1646
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1647
                else
1648
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1649
            }
1650
            break;
1651
        case 0x2A:
1652
            /* SQRTG */
1653
            if (!(ctx->amask & AMASK_FIX))
1654
                goto invalid_opc;
1655
            gen_fsqrtg(rb, rc);
1656
            break;
1657
        case 0x02B:
1658
            /* SQRTT */
1659
            if (!(ctx->amask & AMASK_FIX))
1660
                goto invalid_opc;
1661
            gen_fsqrtt(rb, rc);
1662
            break;
1663
        default:
1664
            goto invalid_opc;
1665
        }
1666
        break;
1667
    case 0x15:
1668
        /* VAX floating point */
1669
        /* XXX: rounding mode and trap are ignored (!) */
1670
        switch (fpfn) { /* f11 & 0x3F */
1671
        case 0x00:
1672
            /* ADDF */
1673
            gen_faddf(ra, rb, rc);
1674
            break;
1675
        case 0x01:
1676
            /* SUBF */
1677
            gen_fsubf(ra, rb, rc);
1678
            break;
1679
        case 0x02:
1680
            /* MULF */
1681
            gen_fmulf(ra, rb, rc);
1682
            break;
1683
        case 0x03:
1684
            /* DIVF */
1685
            gen_fdivf(ra, rb, rc);
1686
            break;
1687
        case 0x1E:
1688
            /* CVTDG */
1689
#if 0 // TODO
1690
            gen_fcvtdg(rb, rc);
1691
#else
1692
            goto invalid_opc;
1693
#endif
1694
            break;
1695
        case 0x20:
1696
            /* ADDG */
1697
            gen_faddg(ra, rb, rc);
1698
            break;
1699
        case 0x21:
1700
            /* SUBG */
1701
            gen_fsubg(ra, rb, rc);
1702
            break;
1703
        case 0x22:
1704
            /* MULG */
1705
            gen_fmulg(ra, rb, rc);
1706
            break;
1707
        case 0x23:
1708
            /* DIVG */
1709
            gen_fdivg(ra, rb, rc);
1710
            break;
1711
        case 0x25:
1712
            /* CMPGEQ */
1713
            gen_fcmpgeq(ra, rb, rc);
1714
            break;
1715
        case 0x26:
1716
            /* CMPGLT */
1717
            gen_fcmpglt(ra, rb, rc);
1718
            break;
1719
        case 0x27:
1720
            /* CMPGLE */
1721
            gen_fcmpgle(ra, rb, rc);
1722
            break;
1723
        case 0x2C:
1724
            /* CVTGF */
1725
            gen_fcvtgf(rb, rc);
1726
            break;
1727
        case 0x2D:
1728
            /* CVTGD */
1729
#if 0 // TODO
1730
            gen_fcvtgd(rb, rc);
1731
#else
1732
            goto invalid_opc;
1733
#endif
1734
            break;
1735
        case 0x2F:
1736
            /* CVTGQ */
1737
            gen_fcvtgq(rb, rc);
1738
            break;
1739
        case 0x3C:
1740
            /* CVTQF */
1741
            gen_fcvtqf(rb, rc);
1742
            break;
1743
        case 0x3E:
1744
            /* CVTQG */
1745
            gen_fcvtqg(rb, rc);
1746
            break;
1747
        default:
1748
            goto invalid_opc;
1749
        }
1750
        break;
1751
    case 0x16:
1752
        /* IEEE floating-point */
1753
        /* XXX: rounding mode and traps are ignored (!) */
1754
        switch (fpfn) { /* f11 & 0x3F */
1755
        case 0x00:
1756
            /* ADDS */
1757
            gen_fadds(ra, rb, rc);
1758
            break;
1759
        case 0x01:
1760
            /* SUBS */
1761
            gen_fsubs(ra, rb, rc);
1762
            break;
1763
        case 0x02:
1764
            /* MULS */
1765
            gen_fmuls(ra, rb, rc);
1766
            break;
1767
        case 0x03:
1768
            /* DIVS */
1769
            gen_fdivs(ra, rb, rc);
1770
            break;
1771
        case 0x20:
1772
            /* ADDT */
1773
            gen_faddt(ra, rb, rc);
1774
            break;
1775
        case 0x21:
1776
            /* SUBT */
1777
            gen_fsubt(ra, rb, rc);
1778
            break;
1779
        case 0x22:
1780
            /* MULT */
1781
            gen_fmult(ra, rb, rc);
1782
            break;
1783
        case 0x23:
1784
            /* DIVT */
1785
            gen_fdivt(ra, rb, rc);
1786
            break;
1787
        case 0x24:
1788
            /* CMPTUN */
1789
            gen_fcmptun(ra, rb, rc);
1790
            break;
1791
        case 0x25:
1792
            /* CMPTEQ */
1793
            gen_fcmpteq(ra, rb, rc);
1794
            break;
1795
        case 0x26:
1796
            /* CMPTLT */
1797
            gen_fcmptlt(ra, rb, rc);
1798
            break;
1799
        case 0x27:
1800
            /* CMPTLE */
1801
            gen_fcmptle(ra, rb, rc);
1802
            break;
1803
        case 0x2C:
1804
            /* XXX: incorrect */
1805
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
1806
                /* CVTST */
1807
                gen_fcvtst(rb, rc);
1808
            } else {
1809
                /* CVTTS */
1810
                gen_fcvtts(rb, rc);
1811
            }
1812
            break;
1813
        case 0x2F:
1814
            /* CVTTQ */
1815
            gen_fcvttq(rb, rc);
1816
            break;
1817
        case 0x3C:
1818
            /* CVTQS */
1819
            gen_fcvtqs(rb, rc);
1820
            break;
1821
        case 0x3E:
1822
            /* CVTQT */
1823
            gen_fcvtqt(rb, rc);
1824
            break;
1825
        default:
1826
            goto invalid_opc;
1827
        }
1828
        break;
1829
    case 0x17:
1830
        switch (fn11) {
1831
        case 0x010:
1832
            /* CVTLQ */
1833
            gen_fcvtlq(rb, rc);
1834
            break;
1835
        case 0x020:
1836
            if (likely(rc != 31)) {
1837
                if (ra == rb) {
1838
                    /* FMOV */
1839
                    if (ra == 31)
1840
                        tcg_gen_movi_i64(cpu_fir[rc], 0);
1841
                    else
1842
                        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1843
                } else {
1844
                    /* CPYS */
1845
                    gen_fcpys(ra, rb, rc);
1846
                }
1847
            }
1848
            break;
1849
        case 0x021:
1850
            /* CPYSN */
1851
            gen_fcpysn(ra, rb, rc);
1852
            break;
1853
        case 0x022:
1854
            /* CPYSE */
1855
            gen_fcpyse(ra, rb, rc);
1856
            break;
1857
        case 0x024:
1858
            /* MT_FPCR */
1859
            if (likely(ra != 31))
1860
                gen_helper_store_fpcr(cpu_fir[ra]);
1861
            else {
1862
                TCGv tmp = tcg_const_i64(0);
1863
                gen_helper_store_fpcr(tmp);
1864
                tcg_temp_free(tmp);
1865
            }
1866
            break;
1867
        case 0x025:
1868
            /* MF_FPCR */
1869
            if (likely(ra != 31))
1870
                gen_helper_load_fpcr(cpu_fir[ra]);
1871
            break;
1872
        case 0x02A:
1873
            /* FCMOVEQ */
1874
            gen_fcmpfeq(ra, rb, rc);
1875
            break;
1876
        case 0x02B:
1877
            /* FCMOVNE */
1878
            gen_fcmpfne(ra, rb, rc);
1879
            break;
1880
        case 0x02C:
1881
            /* FCMOVLT */
1882
            gen_fcmpflt(ra, rb, rc);
1883
            break;
1884
        case 0x02D:
1885
            /* FCMOVGE */
1886
            gen_fcmpfge(ra, rb, rc);
1887
            break;
1888
        case 0x02E:
1889
            /* FCMOVLE */
1890
            gen_fcmpfle(ra, rb, rc);
1891
            break;
1892
        case 0x02F:
1893
            /* FCMOVGT */
1894
            gen_fcmpfgt(ra, rb, rc);
1895
            break;
1896
        case 0x030:
1897
            /* CVTQL */
1898
            gen_fcvtql(rb, rc);
1899
            break;
1900
        case 0x130:
1901
            /* CVTQL/V */
1902
            gen_fcvtqlv(rb, rc);
1903
            break;
1904
        case 0x530:
1905
            /* CVTQL/SV */
1906
            gen_fcvtqlsv(rb, rc);
1907
            break;
1908
        default:
1909
            goto invalid_opc;
1910
        }
1911
        break;
1912
    case 0x18:
1913
        switch ((uint16_t)disp16) {
1914
        case 0x0000:
1915
            /* TRAPB */
1916
            /* No-op. Just exit from the current tb */
1917
            ret = 2;
1918
            break;
1919
        case 0x0400:
1920
            /* EXCB */
1921
            /* No-op. Just exit from the current tb */
1922
            ret = 2;
1923
            break;
1924
        case 0x4000:
1925
            /* MB */
1926
            /* No-op */
1927
            break;
1928
        case 0x4400:
1929
            /* WMB */
1930
            /* No-op */
1931
            break;
1932
        case 0x8000:
1933
            /* FETCH */
1934
            /* No-op */
1935
            break;
1936
        case 0xA000:
1937
            /* FETCH_M */
1938
            /* No-op */
1939
            break;
1940
        case 0xC000:
1941
            /* RPCC */
1942
            if (ra != 31)
1943
                gen_helper_load_pcc(cpu_ir[ra]);
1944
            break;
1945
        case 0xE000:
1946
            /* RC */
1947
            if (ra != 31)
1948
                gen_helper_rc(cpu_ir[ra]);
1949
            break;
1950
        case 0xE800:
1951
            /* ECB */
1952
            break;
1953
        case 0xF000:
1954
            /* RS */
1955
            if (ra != 31)
1956
                gen_helper_rs(cpu_ir[ra]);
1957
            break;
1958
        case 0xF800:
1959
            /* WH64 */
1960
            /* No-op */
1961
            break;
1962
        default:
1963
            goto invalid_opc;
1964
        }
1965
        break;
1966
    case 0x19:
1967
        /* HW_MFPR (PALcode) */
1968
#if defined (CONFIG_USER_ONLY)
1969
        goto invalid_opc;
1970
#else
1971
        if (!ctx->pal_mode)
1972
            goto invalid_opc;
1973
        if (ra != 31) {
1974
            TCGv tmp = tcg_const_i32(insn & 0xFF);
1975
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
1976
            tcg_temp_free(tmp);
1977
        }
1978
        break;
1979
#endif
1980
    case 0x1A:
1981
        if (rb != 31)
1982
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1983
        else
1984
            tcg_gen_movi_i64(cpu_pc, 0);
1985
        if (ra != 31)
1986
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1987
        /* Those four jumps only differ by the branch prediction hint */
1988
        switch (fn2) {
1989
        case 0x0:
1990
            /* JMP */
1991
            break;
1992
        case 0x1:
1993
            /* JSR */
1994
            break;
1995
        case 0x2:
1996
            /* RET */
1997
            break;
1998
        case 0x3:
1999
            /* JSR_COROUTINE */
2000
            break;
2001
        }
2002
        ret = 1;
2003
        break;
2004
    case 0x1B:
2005
        /* HW_LD (PALcode) */
2006
#if defined (CONFIG_USER_ONLY)
2007
        goto invalid_opc;
2008
#else
2009
        if (!ctx->pal_mode)
2010
            goto invalid_opc;
2011
        if (ra != 31) {
2012
            TCGv addr = tcg_temp_new();
2013
            if (rb != 31)
2014
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2015
            else
2016
                tcg_gen_movi_i64(addr, disp12);
2017
            switch ((insn >> 12) & 0xF) {
2018
            case 0x0:
2019
                /* Longword physical access (hw_ldl/p) */
2020
                gen_helper_ldl_raw(cpu_ir[ra], addr);
2021
                break;
2022
            case 0x1:
2023
                /* Quadword physical access (hw_ldq/p) */
2024
                gen_helper_ldq_raw(cpu_ir[ra], addr);
2025
                break;
2026
            case 0x2:
2027
                /* Longword physical access with lock (hw_ldl_l/p) */
2028
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
2029
                break;
2030
            case 0x3:
2031
                /* Quadword physical access with lock (hw_ldq_l/p) */
2032
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
2033
                break;
2034
            case 0x4:
2035
                /* Longword virtual PTE fetch (hw_ldl/v) */
2036
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
2037
                break;
2038
            case 0x5:
2039
                /* Quadword virtual PTE fetch (hw_ldq/v) */
2040
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
2041
                break;
2042
            case 0x6:
2043
                /* Incpu_ir[ra]id */
2044
                goto invalid_opc;
2045
            case 0x7:
2046
                /* Incpu_ir[ra]id */
2047
                goto invalid_opc;
2048
            case 0x8:
2049
                /* Longword virtual access (hw_ldl) */
2050
                gen_helper_st_virt_to_phys(addr, addr);
2051
                gen_helper_ldl_raw(cpu_ir[ra], addr);
2052
                break;
2053
            case 0x9:
2054
                /* Quadword virtual access (hw_ldq) */
2055
                gen_helper_st_virt_to_phys(addr, addr);
2056
                gen_helper_ldq_raw(cpu_ir[ra], addr);
2057
                break;
2058
            case 0xA:
2059
                /* Longword virtual access with protection check (hw_ldl/w) */
2060
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
2061
                break;
2062
            case 0xB:
2063
                /* Quadword virtual access with protection check (hw_ldq/w) */
2064
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
2065
                break;
2066
            case 0xC:
2067
                /* Longword virtual access with alt access mode (hw_ldl/a)*/
2068
                gen_helper_set_alt_mode();
2069
                gen_helper_st_virt_to_phys(addr, addr);
2070
                gen_helper_ldl_raw(cpu_ir[ra], addr);
2071
                gen_helper_restore_mode();
2072
                break;
2073
            case 0xD:
2074
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
2075
                gen_helper_set_alt_mode();
2076
                gen_helper_st_virt_to_phys(addr, addr);
2077
                gen_helper_ldq_raw(cpu_ir[ra], addr);
2078
                gen_helper_restore_mode();
2079
                break;
2080
            case 0xE:
2081
                /* Longword virtual access with alternate access mode and
2082
                 * protection checks (hw_ldl/wa)
2083
                 */
2084
                gen_helper_set_alt_mode();
2085
                gen_helper_ldl_data(cpu_ir[ra], addr);
2086
                gen_helper_restore_mode();
2087
                break;
2088
            case 0xF:
2089
                /* Quadword virtual access with alternate access mode and
2090
                 * protection checks (hw_ldq/wa)
2091
                 */
2092
                gen_helper_set_alt_mode();
2093
                gen_helper_ldq_data(cpu_ir[ra], addr);
2094
                gen_helper_restore_mode();
2095
                break;
2096
            }
2097
            tcg_temp_free(addr);
2098
        }
2099
        break;
2100
#endif
2101
    case 0x1C:
2102
        switch (fn7) {
2103
        case 0x00:
2104
            /* SEXTB */
2105
            if (!(ctx->amask & AMASK_BWX))
2106
                goto invalid_opc;
2107
            if (likely(rc != 31)) {
2108
                if (islit)
2109
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
2110
                else
2111
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
2112
            }
2113
            break;
2114
        case 0x01:
2115
            /* SEXTW */
2116
            if (!(ctx->amask & AMASK_BWX))
2117
                goto invalid_opc;
2118
            if (likely(rc != 31)) {
2119
                if (islit)
2120
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
2121
                else
2122
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
2123
            }
2124
            break;
2125
        case 0x30:
2126
            /* CTPOP */
2127
            if (!(ctx->amask & AMASK_CIX))
2128
                goto invalid_opc;
2129
            if (likely(rc != 31)) {
2130
                if (islit)
2131
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
2132
                else
2133
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
2134
            }
2135
            break;
2136
        case 0x31:
2137
            /* PERR */
2138
            if (!(ctx->amask & AMASK_MVI))
2139
                goto invalid_opc;
2140
            gen_perr(ra, rb, rc, islit, lit);
2141
            break;
2142
        case 0x32:
2143
            /* CTLZ */
2144
            if (!(ctx->amask & AMASK_CIX))
2145
                goto invalid_opc;
2146
            if (likely(rc != 31)) {
2147
                if (islit)
2148
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
2149
                else
2150
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
2151
            }
2152
            break;
2153
        case 0x33:
2154
            /* CTTZ */
2155
            if (!(ctx->amask & AMASK_CIX))
2156
                goto invalid_opc;
2157
            if (likely(rc != 31)) {
2158
                if (islit)
2159
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
2160
                else
2161
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
2162
            }
2163
            break;
2164
        case 0x34:
2165
            /* UNPKBW */
2166
            if (!(ctx->amask & AMASK_MVI))
2167
                goto invalid_opc;
2168
            if (real_islit || ra != 31)
2169
                goto invalid_opc;
2170
            gen_unpkbw (rb, rc);
2171
            break;
2172
        case 0x35:
2173
            /* UNPKBL */
2174
            if (!(ctx->amask & AMASK_MVI))
2175
                goto invalid_opc;
2176
            if (real_islit || ra != 31)
2177
                goto invalid_opc;
2178
            gen_unpkbl (rb, rc);
2179
            break;
2180
        case 0x36:
2181
            /* PKWB */
2182
            if (!(ctx->amask & AMASK_MVI))
2183
                goto invalid_opc;
2184
            if (real_islit || ra != 31)
2185
                goto invalid_opc;
2186
            gen_pkwb (rb, rc);
2187
            break;
2188
        case 0x37:
2189
            /* PKLB */
2190
            if (!(ctx->amask & AMASK_MVI))
2191
                goto invalid_opc;
2192
            if (real_islit || ra != 31)
2193
                goto invalid_opc;
2194
            gen_pklb (rb, rc);
2195
            break;
2196
        case 0x38:
2197
            /* MINSB8 */
2198
            if (!(ctx->amask & AMASK_MVI))
2199
                goto invalid_opc;
2200
            gen_minsb8 (ra, rb, rc, islit, lit);
2201
            break;
2202
        case 0x39:
2203
            /* MINSW4 */
2204
            if (!(ctx->amask & AMASK_MVI))
2205
                goto invalid_opc;
2206
            gen_minsw4 (ra, rb, rc, islit, lit);
2207
            break;
2208
        case 0x3A:
2209
            /* MINUB8 */
2210
            if (!(ctx->amask & AMASK_MVI))
2211
                goto invalid_opc;
2212
            gen_minub8 (ra, rb, rc, islit, lit);
2213
            break;
2214
        case 0x3B:
2215
            /* MINUW4 */
2216
            if (!(ctx->amask & AMASK_MVI))
2217
                goto invalid_opc;
2218
            gen_minuw4 (ra, rb, rc, islit, lit);
2219
            break;
2220
        case 0x3C:
2221
            /* MAXUB8 */
2222
            if (!(ctx->amask & AMASK_MVI))
2223
                goto invalid_opc;
2224
            gen_maxub8 (ra, rb, rc, islit, lit);
2225
            break;
2226
        case 0x3D:
2227
            /* MAXUW4 */
2228
            if (!(ctx->amask & AMASK_MVI))
2229
                goto invalid_opc;
2230
            gen_maxuw4 (ra, rb, rc, islit, lit);
2231
            break;
2232
        case 0x3E:
2233
            /* MAXSB8 */
2234
            if (!(ctx->amask & AMASK_MVI))
2235
                goto invalid_opc;
2236
            gen_maxsb8 (ra, rb, rc, islit, lit);
2237
            break;
2238
        case 0x3F:
2239
            /* MAXSW4 */
2240
            if (!(ctx->amask & AMASK_MVI))
2241
                goto invalid_opc;
2242
            gen_maxsw4 (ra, rb, rc, islit, lit);
2243
            break;
2244
        case 0x70:
2245
            /* FTOIT */
2246
            if (!(ctx->amask & AMASK_FIX))
2247
                goto invalid_opc;
2248
            if (likely(rc != 31)) {
2249
                if (ra != 31)
2250
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
2251
                else
2252
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2253
            }
2254
            break;
2255
        case 0x78:
2256
            /* FTOIS */
2257
            if (!(ctx->amask & AMASK_FIX))
2258
                goto invalid_opc;
2259
            if (rc != 31) {
2260
                TCGv_i32 tmp1 = tcg_temp_new_i32();
2261
                if (ra != 31)
2262
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
2263
                else {
2264
                    TCGv tmp2 = tcg_const_i64(0);
2265
                    gen_helper_s_to_memory(tmp1, tmp2);
2266
                    tcg_temp_free(tmp2);
2267
                }
2268
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
2269
                tcg_temp_free_i32(tmp1);
2270
            }
2271
            break;
2272
        default:
2273
            goto invalid_opc;
2274
        }
2275
        break;
2276
    case 0x1D:
2277
        /* HW_MTPR (PALcode) */
2278
#if defined (CONFIG_USER_ONLY)
2279
        goto invalid_opc;
2280
#else
2281
        if (!ctx->pal_mode)
2282
            goto invalid_opc;
2283
        else {
2284
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
2285
            if (ra != 31)
2286
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
2287
            else {
2288
                TCGv tmp2 = tcg_const_i64(0);
2289
                gen_helper_mtpr(tmp1, tmp2);
2290
                tcg_temp_free(tmp2);
2291
            }
2292
            tcg_temp_free(tmp1);
2293
            ret = 2;
2294
        }
2295
        break;
2296
#endif
2297
    case 0x1E:
2298
        /* HW_REI (PALcode) */
2299
#if defined (CONFIG_USER_ONLY)
2300
        goto invalid_opc;
2301
#else
2302
        if (!ctx->pal_mode)
2303
            goto invalid_opc;
2304
        if (rb == 31) {
2305
            /* "Old" alpha */
2306
            gen_helper_hw_rei();
2307
        } else {
2308
            TCGv tmp;
2309

    
2310
            if (ra != 31) {
2311
                tmp = tcg_temp_new();
2312
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
2313
            } else
2314
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
2315
            gen_helper_hw_ret(tmp);
2316
            tcg_temp_free(tmp);
2317
        }
2318
        ret = 2;
2319
        break;
2320
#endif
2321
    case 0x1F:
2322
        /* HW_ST (PALcode) */
2323
#if defined (CONFIG_USER_ONLY)
2324
        goto invalid_opc;
2325
#else
2326
        if (!ctx->pal_mode)
2327
            goto invalid_opc;
2328
        else {
2329
            TCGv addr, val;
2330
            addr = tcg_temp_new();
2331
            if (rb != 31)
2332
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2333
            else
2334
                tcg_gen_movi_i64(addr, disp12);
2335
            if (ra != 31)
2336
                val = cpu_ir[ra];
2337
            else {
2338
                val = tcg_temp_new();
2339
                tcg_gen_movi_i64(val, 0);
2340
            }
2341
            switch ((insn >> 12) & 0xF) {
2342
            case 0x0:
2343
                /* Longword physical access */
2344
                gen_helper_stl_raw(val, addr);
2345
                break;
2346
            case 0x1:
2347
                /* Quadword physical access */
2348
                gen_helper_stq_raw(val, addr);
2349
                break;
2350
            case 0x2:
2351
                /* Longword physical access with lock */
2352
                gen_helper_stl_c_raw(val, val, addr);
2353
                break;
2354
            case 0x3:
2355
                /* Quadword physical access with lock */
2356
                gen_helper_stq_c_raw(val, val, addr);
2357
                break;
2358
            case 0x4:
2359
                /* Longword virtual access */
2360
                gen_helper_st_virt_to_phys(addr, addr);
2361
                gen_helper_stl_raw(val, addr);
2362
                break;
2363
            case 0x5:
2364
                /* Quadword virtual access */
2365
                gen_helper_st_virt_to_phys(addr, addr);
2366
                gen_helper_stq_raw(val, addr);
2367
                break;
2368
            case 0x6:
2369
                /* Invalid */
2370
                goto invalid_opc;
2371
            case 0x7:
2372
                /* Invalid */
2373
                goto invalid_opc;
2374
            case 0x8:
2375
                /* Invalid */
2376
                goto invalid_opc;
2377
            case 0x9:
2378
                /* Invalid */
2379
                goto invalid_opc;
2380
            case 0xA:
2381
                /* Invalid */
2382
                goto invalid_opc;
2383
            case 0xB:
2384
                /* Invalid */
2385
                goto invalid_opc;
2386
            case 0xC:
2387
                /* Longword virtual access with alternate access mode */
2388
                gen_helper_set_alt_mode();
2389
                gen_helper_st_virt_to_phys(addr, addr);
2390
                gen_helper_stl_raw(val, addr);
2391
                gen_helper_restore_mode();
2392
                break;
2393
            case 0xD:
2394
                /* Quadword virtual access with alternate access mode */
2395
                gen_helper_set_alt_mode();
2396
                gen_helper_st_virt_to_phys(addr, addr);
2397
                gen_helper_stl_raw(val, addr);
2398
                gen_helper_restore_mode();
2399
                break;
2400
            case 0xE:
2401
                /* Invalid */
2402
                goto invalid_opc;
2403
            case 0xF:
2404
                /* Invalid */
2405
                goto invalid_opc;
2406
            }
2407
            if (ra == 31)
2408
                tcg_temp_free(val);
2409
            tcg_temp_free(addr);
2410
        }
2411
        break;
2412
#endif
2413
    case 0x20:
2414
        /* LDF */
2415
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2416
        break;
2417
    case 0x21:
2418
        /* LDG */
2419
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2420
        break;
2421
    case 0x22:
2422
        /* LDS */
2423
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2424
        break;
2425
    case 0x23:
2426
        /* LDT */
2427
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2428
        break;
2429
    case 0x24:
2430
        /* STF */
2431
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
2432
        break;
2433
    case 0x25:
2434
        /* STG */
2435
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
2436
        break;
2437
    case 0x26:
2438
        /* STS */
2439
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
2440
        break;
2441
    case 0x27:
2442
        /* STT */
2443
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
2444
        break;
2445
    case 0x28:
2446
        /* LDL */
2447
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2448
        break;
2449
    case 0x29:
2450
        /* LDQ */
2451
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2452
        break;
2453
    case 0x2A:
2454
        /* LDL_L */
2455
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2456
        break;
2457
    case 0x2B:
2458
        /* LDQ_L */
2459
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2460
        break;
2461
    case 0x2C:
2462
        /* STL */
2463
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
2464
        break;
2465
    case 0x2D:
2466
        /* STQ */
2467
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
2468
        break;
2469
    case 0x2E:
2470
        /* STL_C */
2471
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
2472
        break;
2473
    case 0x2F:
2474
        /* STQ_C */
2475
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
2476
        break;
2477
    case 0x30:
2478
        /* BR */
2479
        if (ra != 31)
2480
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2481
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2482
        ret = 1;
2483
        break;
2484
    case 0x31: /* FBEQ */
2485
    case 0x32: /* FBLT */
2486
    case 0x33: /* FBLE */
2487
        gen_fbcond(ctx, opc, ra, disp21);
2488
        ret = 1;
2489
        break;
2490
    case 0x34:
2491
        /* BSR */
2492
        if (ra != 31)
2493
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2494
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2495
        ret = 1;
2496
        break;
2497
    case 0x35: /* FBNE */
2498
    case 0x36: /* FBGE */
2499
    case 0x37: /* FBGT */
2500
        gen_fbcond(ctx, opc, ra, disp21);
2501
        ret = 1;
2502
        break;
2503
    case 0x38:
2504
        /* BLBC */
2505
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2506
        ret = 1;
2507
        break;
2508
    case 0x39:
2509
        /* BEQ */
2510
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2511
        ret = 1;
2512
        break;
2513
    case 0x3A:
2514
        /* BLT */
2515
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2516
        ret = 1;
2517
        break;
2518
    case 0x3B:
2519
        /* BLE */
2520
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2521
        ret = 1;
2522
        break;
2523
    case 0x3C:
2524
        /* BLBS */
2525
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2526
        ret = 1;
2527
        break;
2528
    case 0x3D:
2529
        /* BNE */
2530
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2531
        ret = 1;
2532
        break;
2533
    case 0x3E:
2534
        /* BGE */
2535
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2536
        ret = 1;
2537
        break;
2538
    case 0x3F:
2539
        /* BGT */
2540
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2541
        ret = 1;
2542
        break;
2543
    invalid_opc:
2544
        gen_invalid(ctx);
2545
        ret = 3;
2546
        break;
2547
    }
2548

    
2549
    return ret;
2550
}
2551

    
2552
/* Translate a block of Alpha guest instructions starting at tb->pc into
 * TCG ops.  When SEARCH_PC is non-zero, also record per-op guest PC and
 * icount data in the gen_opc_* side tables so that a host PC can later be
 * mapped back to a guest PC (used for exception restart).
 */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;     /* lj: last gen_opc_* slot filled; -1 = none yet */
    int ret;            /* translate_one status: 0 = continue, nonzero = stop */
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    /* Current mode bits of the PS register select the MMU index;
       bit 0 of EXC_ADDR indicates PALmode. */
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        /* Emit a debug exception if a breakpoint is set at this PC. */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record the guest PC for the next TCG op; zero-fill any
               slots skipped since the previous instruction. */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        /* The last instruction of an I/O-terminated TB must run with
           io access enabled.  */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        insn = ldl_code(ctx.pc);
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        /* Stop before overflowing the TCG op buffer. */
        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    /* ret == 1 (branch) and ret == 3 (invalid opcode) have already set
       cpu_pc themselves; otherwise synchronize it here. */
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the tail of the side tables up to the final op. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
2658

    
2659
/* Translate a TB without recording PC-search side tables (normal path). */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
2663

    
2664
/* Re-translate a TB with PC-search tables enabled, so a faulting host PC
   can be mapped back to the guest PC (exception restart path). */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
2668

    
2669
/* Static description of one emulated Alpha CPU model.  */
struct cpu_def_t {
    const char *name;       /* model name matched against the -cpu option */
    int implver, amask;     /* values reported by IMPLVER and AMASK insns */
};
2673

    
2674
static const struct cpu_def_t cpu_defs[] = {
2675
    { "ev4",   IMPLVER_2106x, 0 },
2676
    { "ev5",   IMPLVER_21164, 0 },
2677
    { "ev56",  IMPLVER_21164, AMASK_BWX },
2678
    { "pca56", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
2679
    { "ev6",   IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
2680
    { "ev67",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
2681
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
2682
    { "ev68",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
2683
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
2684
    { "21064", IMPLVER_2106x, 0 },
2685
    { "21164", IMPLVER_21164, 0 },
2686
    { "21164a", IMPLVER_21164, AMASK_BWX },
2687
    { "21164pc", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
2688
    { "21264", IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
2689
    { "21264a", IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
2690
                                | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), }
2691
};
2692

    
2693
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2694
{
2695
    CPUAlphaState *env;
2696
    uint64_t hwpcb;
2697
    int implver, amask, i, max;
2698

    
2699
    env = qemu_mallocz(sizeof(CPUAlphaState));
2700
    cpu_exec_init(env);
2701
    alpha_translate_init();
2702
    tlb_flush(env, 1);
2703

    
2704
    /* Default to ev67; no reason not to emulate insns by default.  */
2705
    implver = IMPLVER_21264;
2706
    amask = (AMASK_BWX | AMASK_FIX | AMASK_CIX | AMASK_MVI
2707
             | AMASK_TRAP | AMASK_PREFETCH);
2708

    
2709
    max = ARRAY_SIZE(cpu_defs);
2710
    for (i = 0; i < max; i++) {
2711
        if (strcmp (cpu_model, cpu_defs[i].name) == 0) {
2712
            implver = cpu_defs[i].implver;
2713
            amask = cpu_defs[i].amask;
2714
            break;
2715
        }
2716
    }
2717
    env->implver = implver;
2718
    env->amask = amask;
2719

    
2720
    env->ps = 0x1F00;
2721
#if defined (CONFIG_USER_ONLY)
2722
    env->ps |= 1 << 3;
2723
#endif
2724
    pal_init(env);
2725
    /* Initialize IPR */
2726
    hwpcb = env->ipr[IPR_PCBB];
2727
    env->ipr[IPR_ASN] = 0;
2728
    env->ipr[IPR_ASTEN] = 0;
2729
    env->ipr[IPR_ASTSR] = 0;
2730
    env->ipr[IPR_DATFX] = 0;
2731
    /* XXX: fix this */
2732
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2733
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2734
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2735
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2736
    env->ipr[IPR_FEN] = 0;
2737
    env->ipr[IPR_IPL] = 31;
2738
    env->ipr[IPR_MCES] = 0;
2739
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2740
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2741
    env->ipr[IPR_SISR] = 0;
2742
    env->ipr[IPR_VIRBND] = -1ULL;
2743

    
2744
    qemu_init_vcpu(env);
2745
    return env;
2746
}
2747

    
2748
/* Restore the guest PC after a searched-pc translation: copy the PC
   recorded for opcode index pc_pos (filled in by the search_pc pass)
   back into env->pc.  tb, searched_pc and puc are unused on Alpha.  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}