target-alpha/translate.c @ 1a1f7dbc

1
/*
2
 *  Alpha emulation cpu translation for qemu.
3
 *
4
 *  Copyright (c) 2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
19
 */
20

    
21
#include <stdint.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24

    
25
#include "cpu.h"
26
#include "exec-all.h"
27
#include "disas.h"
28
#include "host-utils.h"
29
#include "tcg-op.h"
30
#include "qemu-common.h"
31

    
32
#include "helper.h"
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
/* #define DO_SINGLE_STEP */
37
#define ALPHA_DEBUG_DISAS
38
/* #define DO_TB_FLUSH */
39

    
40

    
41
#ifdef ALPHA_DEBUG_DISAS
42
#  define LOG_DISAS(...) qemu_log(__VA_ARGS__)
43
#else
44
#  define LOG_DISAS(...) do { } while (0)
45
#endif
46

    
47
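/* Per-instruction translation state: the current PC, the MMU index used
   for memory accesses, the PALmode flag (system emulation only), and the
   CPU feature mask (amask) used to reject instructions the selected CPU
   does not implement.  */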
typedef struct DisasContext DisasContext;
48
struct DisasContext {
49
    uint64_t pc;
50
    int mem_idx;
51
#if !defined (CONFIG_USER_ONLY)
52
    int pal_mode;
53
#endif
54
    CPUAlphaState *env;
55
    uint32_t amask;
56
};
57

    
58
/* global register indexes */
59
static TCGv_ptr cpu_env;
60
static TCGv cpu_ir[31];
61
static TCGv cpu_fir[31];
62
static TCGv cpu_pc;
63
static TCGv cpu_lock;
64

    
65
/* register names */
66
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
67

    
68
#include "gen-icount.h"
69

    
70
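/* Register the TCG globals backing the Alpha CPU state: the 31 integer
   and 31 FP registers, the PC and the load-locked address.  Guarded by
   done_init so repeated calls are no-ops.  */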
static void alpha_translate_init(void)
71
{
72
    int i;
73
    char *p;
74
    static int done_init = 0;
75

    
76
    if (done_init)
77
        return;
78

    
79
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
80

    
81
    p = cpu_reg_names;
82
    for (i = 0; i < 31; i++) {
83
        sprintf(p, "ir%d", i);
84
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
85
                                           offsetof(CPUState, ir[i]), p);
86
        p += (i < 10) ? 4 : 5;
87

    
88
        sprintf(p, "fir%d", i);
89
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
90
                                            offsetof(CPUState, fir[i]), p);
91
        p += (i < 10) ? 5 : 6;
92
    }
93

    
94
    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
95
                                    offsetof(CPUState, pc), "pc");
96

    
97
    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
98
                                      offsetof(CPUState, lock), "lock");
99

    
100
    /* register helpers */
101
#define GEN_HELPER 2
102
#include "helper.h"
103

    
104
    done_init = 1;
105
}
106

    
107
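/* Raise an exception: synchronize cpu_pc with the translation-time PC,
   then call the excp helper with the exception number and error code.  */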
static always_inline void gen_excp (DisasContext *ctx,
108
                                    int exception, int error_code)
109
{
110
    TCGv_i32 tmp1, tmp2;
111

    
112
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
113
    tmp1 = tcg_const_i32(exception);
114
    tmp2 = tcg_const_i32(error_code);
115
    gen_helper_excp(tmp1, tmp2);
116
    tcg_temp_free_i32(tmp2);
117
    tcg_temp_free_i32(tmp1);
118
}
119

    
120
static always_inline void gen_invalid (DisasContext *ctx)
121
{
122
    gen_excp(ctx, EXCP_OPCDEC, 0);
123
}
124

    
125
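/* FP loads.  LDF and LDS fetch 32 bits and expand them to the 64-bit
   register format through helpers; LDG fetches 64 bits and likewise
   converts through a helper.  */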
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
126
{
127
    TCGv tmp = tcg_temp_new();
128
    TCGv_i32 tmp32 = tcg_temp_new_i32();
129
    tcg_gen_qemu_ld32u(tmp, t1, flags);
130
    tcg_gen_trunc_i64_i32(tmp32, tmp);
131
    gen_helper_memory_to_f(t0, tmp32);
132
    tcg_temp_free_i32(tmp32);
133
    tcg_temp_free(tmp);
134
}
135

    
136
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
137
{
138
    TCGv tmp = tcg_temp_new();
139
    tcg_gen_qemu_ld64(tmp, t1, flags);
140
    gen_helper_memory_to_g(t0, tmp);
141
    tcg_temp_free(tmp);
142
}
143

    
144
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
145
{
146
    TCGv tmp = tcg_temp_new();
147
    TCGv_i32 tmp32 = tcg_temp_new_i32();
148
    tcg_gen_qemu_ld32u(tmp, t1, flags);
149
    tcg_gen_trunc_i64_i32(tmp32, tmp);
150
    gen_helper_memory_to_s(t0, tmp32);
151
    tcg_temp_free_i32(tmp32);
152
    tcg_temp_free(tmp);
153
}
154

    
155
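/* Load-locked: remember the locked address in cpu_lock so that a later
   store-conditional can check it, then perform an ordinary load.  */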
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
156
{
157
    tcg_gen_mov_i64(cpu_lock, t1);
158
    tcg_gen_qemu_ld32s(t0, t1, flags);
159
}
160

    
161
static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
162
{
163
    tcg_gen_mov_i64(cpu_lock, t1);
164
    tcg_gen_qemu_ld64(t0, t1, flags);
165
}
166

    
167
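/* Common code for integer and FP loads.  A destination of $31/$f31 turns
   the load into a no-op, a base register of $31 yields a plain
   displacement address, and 'clear' masks the low three address bits
   (LDQ_U).  For example, LDBU is translated as
       gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);  */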
static always_inline void gen_load_mem (DisasContext *ctx,
168
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
169
                                        int ra, int rb, int32_t disp16,
170
                                        int fp, int clear)
171
{
172
    TCGv addr;
173

    
174
    if (unlikely(ra == 31))
175
        return;
176

    
177
    addr = tcg_temp_new();
178
    if (rb != 31) {
179
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
180
        if (clear)
181
            tcg_gen_andi_i64(addr, addr, ~0x7);
182
    } else {
183
        if (clear)
184
            disp16 &= ~0x7;
185
        tcg_gen_movi_i64(addr, disp16);
186
    }
187
    if (fp)
188
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
189
    else
190
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
191
    tcg_temp_free(addr);
192
}
193

    
194
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
195
{
196
    TCGv_i32 tmp32 = tcg_temp_new_i32();
197
    TCGv tmp = tcg_temp_new();
198
    gen_helper_f_to_memory(tmp32, t0);
199
    tcg_gen_extu_i32_i64(tmp, tmp32);
200
    tcg_gen_qemu_st32(tmp, t1, flags);
201
    tcg_temp_free(tmp);
202
    tcg_temp_free_i32(tmp32);
203
}
204

    
205
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
206
{
207
    TCGv tmp = tcg_temp_new();
208
    gen_helper_g_to_memory(tmp, t0);
209
    tcg_gen_qemu_st64(tmp, t1, flags);
210
    tcg_temp_free(tmp);
211
}
212

    
213
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
214
{
215
    TCGv_i32 tmp32 = tcg_temp_new_i32();
216
    TCGv tmp = tcg_temp_new();
217
    gen_helper_s_to_memory(tmp32, t0);
218
    tcg_gen_extu_i32_i64(tmp, tmp32);
219
    tcg_gen_qemu_st32(tmp, t1, flags);
220
    tcg_temp_free(tmp);
221
    tcg_temp_free_i32(tmp32);
222
}
223

    
224
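/* Store-conditional.  The store is performed only if the address still
   matches cpu_lock; t0 is set to 1 on success and 0 on failure, and the
   lock is invalidated by writing -1.  */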
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
225
{
226
    int l1, l2;
227

    
228
    l1 = gen_new_label();
229
    l2 = gen_new_label();
230
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
231
    tcg_gen_qemu_st32(t0, t1, flags);
232
    tcg_gen_movi_i64(t0, 1);
233
    tcg_gen_br(l2);
234
    gen_set_label(l1);
235
    tcg_gen_movi_i64(t0, 0);
236
    gen_set_label(l2);
237
    tcg_gen_movi_i64(cpu_lock, -1);
238
}
239

    
240
static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
241
{
242
    int l1, l2;
243

    
244
    l1 = gen_new_label();
245
    l2 = gen_new_label();
246
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
247
    tcg_gen_qemu_st64(t0, t1, flags);
248
    tcg_gen_movi_i64(t0, 1);
249
    tcg_gen_br(l2);
250
    gen_set_label(l1);
251
    tcg_gen_movi_i64(t0, 0);
252
    gen_set_label(l2);
253
    tcg_gen_movi_i64(cpu_lock, -1);
254
}
255

    
256
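/* Common code for integer and FP stores.  A source of $31/$f31 stores
   zero.  'local' requests local temporaries, which are needed when the
   store callback emits branches, as the store-conditional ones do.  */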
static always_inline void gen_store_mem (DisasContext *ctx,
257
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
258
                                         int ra, int rb, int32_t disp16,
259
                                         int fp, int clear, int local)
260
{
261
    TCGv addr;
262
    if (local)
263
        addr = tcg_temp_local_new();
264
    else
265
        addr = tcg_temp_new();
266
    if (rb != 31) {
267
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
268
        if (clear)
269
            tcg_gen_andi_i64(addr, addr, ~0x7);
270
    } else {
271
        if (clear)
272
            disp16 &= ~0x7;
273
        tcg_gen_movi_i64(addr, disp16);
274
    }
275
    if (ra != 31) {
276
        if (fp)
277
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
278
        else
279
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
280
    } else {
281
        TCGv zero;
282
        if (local)
283
            zero = tcg_const_local_i64(0);
284
        else
285
            zero = tcg_const_i64(0);
286
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
287
        tcg_temp_free(zero);
288
    }
289
    tcg_temp_free(addr);
290
}
291

    
292
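/* Conditional branch on an integer register: cpu_pc is set either to the
   fall-through PC or to that PC plus 4*disp.  With 'mask' set, only the
   low bit of ra is tested (BLBC/BLBS).  */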
static always_inline void gen_bcond (DisasContext *ctx,
293
                                     TCGCond cond,
294
                                     int ra, int32_t disp, int mask)
295
{
296
    int l1, l2;
297

    
298
    l1 = gen_new_label();
299
    l2 = gen_new_label();
300
    if (likely(ra != 31)) {
301
        if (mask) {
302
            TCGv tmp = tcg_temp_new();
303
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
304
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
305
            tcg_temp_free(tmp);
306
        } else
307
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
308
    } else {
309
        /* Very uncommon case - Do not bother to optimize.  */
310
        TCGv tmp = tcg_const_i64(0);
311
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
312
        tcg_temp_free(tmp);
313
    }
314
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
315
    tcg_gen_br(l2);
316
    gen_set_label(l1);
317
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
318
    gen_set_label(l2);
319
}
320

    
321
static always_inline void gen_fbcond (DisasContext *ctx, int opc,
322
                                      int ra, int32_t disp16)
323
{
324
    int l1, l2;
325
    TCGv tmp;
326
    TCGv src;
327

    
328
    l1 = gen_new_label();
329
    l2 = gen_new_label();
330
    if (ra != 31) {
331
        tmp = tcg_temp_new();
332
        src = cpu_fir[ra];
333
    } else  {
334
        tmp = tcg_const_i64(0);
335
        src = tmp;
336
    }
337
    switch (opc) {
338
    case 0x31: /* FBEQ */
339
        gen_helper_cmpfeq(tmp, src);
340
        break;
341
    case 0x32: /* FBLT */
342
        gen_helper_cmpflt(tmp, src);
343
        break;
344
    case 0x33: /* FBLE */
345
        gen_helper_cmpfle(tmp, src);
346
        break;
347
    case 0x35: /* FBNE */
348
        gen_helper_cmpfne(tmp, src);
349
        break;
350
    case 0x36: /* FBGE */
351
        gen_helper_cmpfge(tmp, src);
352
        break;
353
    case 0x37: /* FBGT */
354
        gen_helper_cmpfgt(tmp, src);
355
        break;
356
    default:
357
        abort();
358
    }
359
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
360
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
361
    tcg_gen_br(l2);
362
    gen_set_label(l1);
363
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
364
    gen_set_label(l2);
365
}
366

    
367
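/* Integer conditional move.  inv_cond is the condition under which the
   move is skipped; with 'mask' set only bit 0 of ra is tested
   (CMOVLBC/CMOVLBS).  rc receives either the literal or rb.  */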
static always_inline void gen_cmov (TCGCond inv_cond,
368
                                    int ra, int rb, int rc,
369
                                    int islit, uint8_t lit, int mask)
370
{
371
    int l1;
372

    
373
    if (unlikely(rc == 31))
374
        return;
375

    
376
    l1 = gen_new_label();
377

    
378
    if (ra != 31) {
379
        if (mask) {
380
            TCGv tmp = tcg_temp_new();
381
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
382
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
383
            tcg_temp_free(tmp);
384
        } else
385
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
386
    } else {
387
        /* Very uncommon case - Do not bother to optimize.  */
388
        TCGv tmp = tcg_const_i64(0);
389
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
390
        tcg_temp_free(tmp);
391
    }
392

    
393
    if (islit)
394
        tcg_gen_movi_i64(cpu_ir[rc], lit);
395
    else
396
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
397
    gen_set_label(l1);
398
}
399

    
400
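/* Generate a call to a two-operand FP helper (square roots and the
   various conversions); $f31 as the source operand is replaced by a zero
   constant.  */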
#define FARITH2(name)                                       \
401
static always_inline void glue(gen_f, name)(int rb, int rc) \
402
{                                                           \
403
    if (unlikely(rc == 31))                                 \
404
      return;                                               \
405
                                                            \
406
    if (rb != 31)                                           \
407
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);    \
408
    else {                                                  \
409
        TCGv tmp = tcg_const_i64(0);                        \
410
        gen_helper_ ## name (cpu_fir[rc], tmp);            \
411
        tcg_temp_free(tmp);                                 \
412
    }                                                       \
413
}
414
FARITH2(sqrts)
415
FARITH2(sqrtf)
416
FARITH2(sqrtg)
417
FARITH2(sqrtt)
418
FARITH2(cvtgf)
419
FARITH2(cvtgq)
420
FARITH2(cvtqf)
421
FARITH2(cvtqg)
422
FARITH2(cvtst)
423
FARITH2(cvtts)
424
FARITH2(cvttq)
425
FARITH2(cvtqs)
426
FARITH2(cvtqt)
427
FARITH2(cvtlq)
428
FARITH2(cvtql)
429
FARITH2(cvtqlv)
430
FARITH2(cvtqlsv)
431

    
432
#define FARITH3(name)                                                     \
433
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)      \
434
{                                                                         \
435
    if (unlikely(rc == 31))                                               \
436
        return;                                                           \
437
                                                                          \
438
    if (ra != 31) {                                                       \
439
        if (rb != 31)                                                     \
440
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
441
        else {                                                            \
442
            TCGv tmp = tcg_const_i64(0);                                  \
443
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
444
            tcg_temp_free(tmp);                                           \
445
        }                                                                 \
446
    } else {                                                              \
447
        TCGv tmp = tcg_const_i64(0);                                      \
448
        if (rb != 31)                                                     \
449
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
450
        else                                                              \
451
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                   \
452
        tcg_temp_free(tmp);                                               \
453
    }                                                                     \
454
}
455

    
456
FARITH3(addf)
457
FARITH3(subf)
458
FARITH3(mulf)
459
FARITH3(divf)
460
FARITH3(addg)
461
FARITH3(subg)
462
FARITH3(mulg)
463
FARITH3(divg)
464
FARITH3(cmpgeq)
465
FARITH3(cmpglt)
466
FARITH3(cmpgle)
467
FARITH3(adds)
468
FARITH3(subs)
469
FARITH3(muls)
470
FARITH3(divs)
471
FARITH3(addt)
472
FARITH3(subt)
473
FARITH3(mult)
474
FARITH3(divt)
475
FARITH3(cmptun)
476
FARITH3(cmpteq)
477
FARITH3(cmptlt)
478
FARITH3(cmptle)
479
FARITH3(cpys)
480
FARITH3(cpysn)
481
FARITH3(cpyse)
482

    
483
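/* FP conditional move: a helper evaluates the condition on fir[ra]
   ($f31 reads as zero) and, if it holds, fir[rc] receives fir[rb], or
   zero for $f31.  */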
#define FCMOV(name)                                                   \
484
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)  \
485
{                                                                     \
486
    int l1;                                                           \
487
    TCGv tmp;                                                         \
488
                                                                      \
489
    if (unlikely(rc == 31))                                           \
490
        return;                                                       \
491
                                                                      \
492
    l1 = gen_new_label();                                             \
493
                                                                      \
494
    if (ra != 31) {                                                   \
495
        tmp = tcg_temp_new();                             \
496
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
497
    } else  {                                                         \
498
        tmp = tcg_const_i64(0);                                       \
499
        gen_helper_ ## name (tmp, tmp);                               \
500
    }                                                                 \
501
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
502
    if (rb != 31)                                                     \
503
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                    \
504
    else                                                              \
505
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
506
    gen_set_label(l1);                                                \
507
}
508
FCMOV(cmpfeq)
509
FCMOV(cmpfne)
510
FCMOV(cmpflt)
511
FCMOV(cmpfge)
512
FCMOV(cmpfle)
513
FCMOV(cmpfgt)
514

    
515
/* EXTWH, EXTLH, EXTQH */
516
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
517
                                    int ra, int rb, int rc,
518
                                    int islit, uint8_t lit)
519
{
520
    if (unlikely(rc == 31))
521
        return;
522

    
523
    if (ra != 31) {
524
        if (islit) {
525
            if ((lit & 7) != 0)
526
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
527
            else
528
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
529
        } else {
530
            TCGv tmp1, tmp2;
531
            tmp1 = tcg_temp_new();
532
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
533
            tcg_gen_shli_i64(tmp1, tmp1, 3);
534
            tmp2 = tcg_const_i64(64);
535
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
536
            tcg_temp_free(tmp2);
537
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
538
            tcg_temp_free(tmp1);
539
        }
540
        if (tcg_gen_ext_i64)
541
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
542
    } else
543
        tcg_gen_movi_i64(cpu_ir[rc], 0);
544
}
545

    
546
/* EXTBL, EXTWL, EXTLL, EXTQL */
547
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
548
                                    int ra, int rb, int rc,
549
                                    int islit, uint8_t lit)
550
{
551
    if (unlikely(rc == 31))
552
        return;
553

    
554
    if (ra != 31) {
555
        if (islit) {
556
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
557
        } else {
558
            TCGv tmp = tcg_temp_new();
559
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
560
            tcg_gen_shli_i64(tmp, tmp, 3);
561
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
562
            tcg_temp_free(tmp);
563
        }
564
        if (tcg_gen_ext_i64)
565
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
566
    } else
567
        tcg_gen_movi_i64(cpu_ir[rc], 0);
568
}
569

    
570
/* Code to call arith3 helpers */
571
#define ARITH3(name)                                                  \
572
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
573
                                            int islit, uint8_t lit)   \
574
{                                                                     \
575
    if (unlikely(rc == 31))                                           \
576
        return;                                                       \
577
                                                                      \
578
    if (ra != 31) {                                                   \
579
        if (islit) {                                                  \
580
            TCGv tmp = tcg_const_i64(lit);                            \
581
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
582
            tcg_temp_free(tmp);                                       \
583
        } else                                                        \
584
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
585
    } else {                                                          \
586
        TCGv tmp1 = tcg_const_i64(0);                                 \
587
        if (islit) {                                                  \
588
            TCGv tmp2 = tcg_const_i64(lit);                           \
589
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
590
            tcg_temp_free(tmp2);                                      \
591
        } else                                                        \
592
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
593
        tcg_temp_free(tmp1);                                          \
594
    }                                                                 \
595
}
596
ARITH3(cmpbge)
597
ARITH3(addlv)
598
ARITH3(sublv)
599
ARITH3(addqv)
600
ARITH3(subqv)
601
ARITH3(mskbl)
602
ARITH3(insbl)
603
ARITH3(mskwl)
604
ARITH3(inswl)
605
ARITH3(mskll)
606
ARITH3(insll)
607
ARITH3(zap)
608
ARITH3(zapnot)
609
ARITH3(mskql)
610
ARITH3(insql)
611
ARITH3(mskwh)
612
ARITH3(inswh)
613
ARITH3(msklh)
614
ARITH3(inslh)
615
ARITH3(mskqh)
616
ARITH3(insqh)
617
ARITH3(umulh)
618
ARITH3(mullv)
619
ARITH3(mulqv)
620

    
621
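/* Integer compare: set rc to 1 if the condition holds between ra (zero
   for $31) and the literal or rb, otherwise to 0.  */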
static always_inline void gen_cmp(TCGCond cond,
622
                                  int ra, int rb, int rc,
623
                                  int islit, uint8_t lit)
624
{
625
    int l1, l2;
626
    TCGv tmp;
627

    
628
    if (unlikely(rc == 31))
629
        return;
630

    
631
    l1 = gen_new_label();
632
    l2 = gen_new_label();
633

    
634
    if (ra != 31) {
635
        tmp = tcg_temp_new();
636
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
637
    } else
638
        tmp = tcg_const_i64(0);
639
    if (islit)
640
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
641
    else
642
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
643

    
644
    tcg_gen_movi_i64(cpu_ir[rc], 0);
645
    tcg_gen_br(l2);
646
    gen_set_label(l1);
647
    tcg_gen_movi_i64(cpu_ir[rc], 1);
648
    gen_set_label(l2);
649
}
650

    
651
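/* Decode and translate a single instruction.  Returns 0 in the common
   case; a non-zero value (such as the 3 returned after CALL_PAL) tells
   the caller to end the current translation block.  */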
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
652
{
653
    uint32_t palcode;
654
    int32_t disp21, disp16, disp12;
655
    uint16_t fn11, fn16;
656
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
657
    uint8_t lit;
658
    int ret;
659

    
660
    /* Decode all instruction fields */
661
    opc = insn >> 26;
662
    ra = (insn >> 21) & 0x1F;
663
    rb = (insn >> 16) & 0x1F;
664
    rc = insn & 0x1F;
665
    sbz = (insn >> 13) & 0x07;
666
    islit = (insn >> 12) & 1;
667
    if (rb == 31 && !islit) {
668
        islit = 1;
669
        lit = 0;
670
    } else
671
        lit = (insn >> 13) & 0xFF;
672
    palcode = insn & 0x03FFFFFF;
673
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
674
    disp16 = (int16_t)(insn & 0x0000FFFF);
675
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
676
    fn16 = insn & 0x0000FFFF;
677
    fn11 = (insn >> 5) & 0x000007FF;
678
    fpfn = fn11 & 0x3F;
679
    fn7 = (insn >> 5) & 0x0000007F;
680
    fn2 = (insn >> 5) & 0x00000003;
681
    ret = 0;
682
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
683
              opc, ra, rb, rc, disp16);
684
    switch (opc) {
685
    case 0x00:
686
        /* CALL_PAL */
687
        if (palcode >= 0x80 && palcode < 0xC0) {
688
            /* Unprivileged PAL call */
689
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
690
#if !defined (CONFIG_USER_ONLY)
691
        } else if (palcode < 0x40) {
692
            /* Privileged PAL code */
693
            if (ctx->mem_idx & 1)
694
                goto invalid_opc;
695
            else
696
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
697
#endif
698
        } else {
699
            /* Invalid PAL call */
700
            goto invalid_opc;
701
        }
702
        ret = 3;
703
        break;
704
    case 0x01:
705
        /* OPC01 */
706
        goto invalid_opc;
707
    case 0x02:
708
        /* OPC02 */
709
        goto invalid_opc;
710
    case 0x03:
711
        /* OPC03 */
712
        goto invalid_opc;
713
    case 0x04:
714
        /* OPC04 */
715
        goto invalid_opc;
716
    case 0x05:
717
        /* OPC05 */
718
        goto invalid_opc;
719
    case 0x06:
720
        /* OPC06 */
721
        goto invalid_opc;
722
    case 0x07:
723
        /* OPC07 */
724
        goto invalid_opc;
725
    case 0x08:
726
        /* LDA */
727
        if (likely(ra != 31)) {
728
            if (rb != 31)
729
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
730
            else
731
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
732
        }
733
        break;
734
    case 0x09:
735
        /* LDAH */
736
        if (likely(ra != 31)) {
737
            if (rb != 31)
738
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
739
            else
740
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
741
        }
742
        break;
743
    case 0x0A:
744
        /* LDBU */
745
        if (!(ctx->amask & AMASK_BWX))
746
            goto invalid_opc;
747
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
748
        break;
749
    case 0x0B:
750
        /* LDQ_U */
751
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
752
        break;
753
    case 0x0C:
754
        /* LDWU */
755
        if (!(ctx->amask & AMASK_BWX))
756
            goto invalid_opc;
757
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
758
        break;
759
    case 0x0D:
760
        /* STW */
761
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
762
        break;
763
    case 0x0E:
764
        /* STB */
765
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
766
        break;
767
    case 0x0F:
768
        /* STQ_U */
769
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
770
        break;
771
    case 0x10:
772
        switch (fn7) {
773
        case 0x00:
774
            /* ADDL */
775
            if (likely(rc != 31)) {
776
                if (ra != 31) {
777
                    if (islit) {
778
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
779
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
780
                    } else {
781
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
782
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
783
                    }
784
                } else {
785
                    if (islit)
786
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
787
                    else
788
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
789
                }
790
            }
791
            break;
792
        case 0x02:
793
            /* S4ADDL */
794
            if (likely(rc != 31)) {
795
                if (ra != 31) {
796
                    TCGv tmp = tcg_temp_new();
797
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
798
                    if (islit)
799
                        tcg_gen_addi_i64(tmp, tmp, lit);
800
                    else
801
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
802
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
803
                    tcg_temp_free(tmp);
804
                } else {
805
                    if (islit)
806
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
807
                    else
808
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
809
                }
810
            }
811
            break;
812
        case 0x09:
813
            /* SUBL */
814
            if (likely(rc != 31)) {
815
                if (ra != 31) {
816
                    if (islit)
817
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
818
                    else
819
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
820
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
821
                } else {
822
                    if (islit)
823
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
824
                    else {
825
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
826
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
827
                }
828
            }
829
            break;
830
        case 0x0B:
831
            /* S4SUBL */
832
            if (likely(rc != 31)) {
833
                if (ra != 31) {
834
                    TCGv tmp = tcg_temp_new();
835
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
836
                    if (islit)
837
                        tcg_gen_subi_i64(tmp, tmp, lit);
838
                    else
839
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
840
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
841
                    tcg_temp_free(tmp);
842
                } else {
843
                    if (islit)
844
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
845
                    else {
846
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
847
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
848
                    }
849
                }
850
            }
851
            break;
852
        case 0x0F:
853
            /* CMPBGE */
854
            gen_cmpbge(ra, rb, rc, islit, lit);
855
            break;
856
        case 0x12:
857
            /* S8ADDL */
858
            if (likely(rc != 31)) {
859
                if (ra != 31) {
860
                    TCGv tmp = tcg_temp_new();
861
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
862
                    if (islit)
863
                        tcg_gen_addi_i64(tmp, tmp, lit);
864
                    else
865
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
866
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
867
                    tcg_temp_free(tmp);
868
                } else {
869
                    if (islit)
870
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
871
                    else
872
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
873
                }
874
            }
875
            break;
876
        case 0x1B:
877
            /* S8SUBL */
878
            if (likely(rc != 31)) {
879
                if (ra != 31) {
880
                    TCGv tmp = tcg_temp_new();
881
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
882
                    if (islit)
883
                        tcg_gen_subi_i64(tmp, tmp, lit);
884
                    else
885
                       tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
886
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
887
                    tcg_temp_free(tmp);
888
                } else {
889
                    if (islit)
890
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
891
                    else
892
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
893
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
894
                    }
895
                }
896
            }
897
            break;
898
        case 0x1D:
899
            /* CMPULT */
900
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
901
            break;
902
        case 0x20:
903
            /* ADDQ */
904
            if (likely(rc != 31)) {
905
                if (ra != 31) {
906
                    if (islit)
907
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
908
                    else
909
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
910
                } else {
911
                    if (islit)
912
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
913
                    else
914
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
915
                }
916
            }
917
            break;
918
        case 0x22:
919
            /* S4ADDQ */
920
            if (likely(rc != 31)) {
921
                if (ra != 31) {
922
                    TCGv tmp = tcg_temp_new();
923
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
924
                    if (islit)
925
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
926
                    else
927
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
928
                    tcg_temp_free(tmp);
929
                } else {
930
                    if (islit)
931
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
932
                    else
933
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
934
                }
935
            }
936
            break;
937
        case 0x29:
938
            /* SUBQ */
939
            if (likely(rc != 31)) {
940
                if (ra != 31) {
941
                    if (islit)
942
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
943
                    else
944
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
945
                } else {
946
                    if (islit)
947
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
948
                    else
949
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
950
                }
951
            }
952
            break;
953
        case 0x2B:
954
            /* S4SUBQ */
955
            if (likely(rc != 31)) {
956
                if (ra != 31) {
957
                    TCGv tmp = tcg_temp_new();
958
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
959
                    if (islit)
960
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
961
                    else
962
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
963
                    tcg_temp_free(tmp);
964
                } else {
965
                    if (islit)
966
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
967
                    else
968
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
969
                }
970
            }
971
            break;
972
        case 0x2D:
973
            /* CMPEQ */
974
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
975
            break;
976
        case 0x32:
977
            /* S8ADDQ */
978
            if (likely(rc != 31)) {
979
                if (ra != 31) {
980
                    TCGv tmp = tcg_temp_new();
981
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
982
                    if (islit)
983
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
984
                    else
985
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
986
                    tcg_temp_free(tmp);
987
                } else {
988
                    if (islit)
989
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
990
                    else
991
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
992
                }
993
            }
994
            break;
995
        case 0x3B:
996
            /* S8SUBQ */
997
            if (likely(rc != 31)) {
998
                if (ra != 31) {
999
                    TCGv tmp = tcg_temp_new();
1000
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1001
                    if (islit)
1002
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1003
                    else
1004
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1005
                    tcg_temp_free(tmp);
1006
                } else {
1007
                    if (islit)
1008
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1009
                    else
1010
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1011
                }
1012
            }
1013
            break;
1014
        case 0x3D:
1015
            /* CMPULE */
1016
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1017
            break;
1018
        case 0x40:
1019
            /* ADDL/V */
1020
            gen_addlv(ra, rb, rc, islit, lit);
1021
            break;
1022
        case 0x49:
1023
            /* SUBL/V */
1024
            gen_sublv(ra, rb, rc, islit, lit);
1025
            break;
1026
        case 0x4D:
1027
            /* CMPLT */
1028
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1029
            break;
1030
        case 0x60:
1031
            /* ADDQ/V */
1032
            gen_addqv(ra, rb, rc, islit, lit);
1033
            break;
1034
        case 0x69:
1035
            /* SUBQ/V */
1036
            gen_subqv(ra, rb, rc, islit, lit);
1037
            break;
1038
        case 0x6D:
1039
            /* CMPLE */
1040
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1041
            break;
1042
        default:
1043
            goto invalid_opc;
1044
        }
1045
        break;
1046
    case 0x11:
1047
        switch (fn7) {
1048
        case 0x00:
1049
            /* AND */
1050
            if (likely(rc != 31)) {
1051
                if (ra == 31)
1052
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1053
                else if (islit)
1054
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1055
                else
1056
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1057
            }
1058
            break;
1059
        case 0x08:
1060
            /* BIC */
1061
            if (likely(rc != 31)) {
1062
                if (ra != 31) {
1063
                    if (islit)
1064
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1065
                    else
1066
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1067
                } else
1068
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1069
            }
1070
            break;
1071
        case 0x14:
1072
            /* CMOVLBS */
1073
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1074
            break;
1075
        case 0x16:
1076
            /* CMOVLBC */
1077
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1078
            break;
1079
        case 0x20:
1080
            /* BIS */
1081
            if (likely(rc != 31)) {
1082
                if (ra != 31) {
1083
                    if (islit)
1084
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1085
                    else
1086
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1087
                } else {
1088
                    if (islit)
1089
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1090
                    else
1091
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1092
                }
1093
            }
1094
            break;
1095
        case 0x24:
1096
            /* CMOVEQ */
1097
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1098
            break;
1099
        case 0x26:
1100
            /* CMOVNE */
1101
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1102
            break;
1103
        case 0x28:
1104
            /* ORNOT */
1105
            if (likely(rc != 31)) {
1106
                if (ra != 31) {
1107
                    if (islit)
1108
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1109
                    else
1110
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1111
                } else {
1112
                    if (islit)
1113
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1114
                    else
1115
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1116
                }
1117
            }
1118
            break;
1119
        case 0x40:
1120
            /* XOR */
1121
            if (likely(rc != 31)) {
1122
                if (ra != 31) {
1123
                    if (islit)
1124
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1125
                    else
1126
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1127
                } else {
1128
                    if (islit)
1129
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1130
                    else
1131
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1132
                }
1133
            }
1134
            break;
1135
        case 0x44:
1136
            /* CMOVLT */
1137
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1138
            break;
1139
        case 0x46:
1140
            /* CMOVGE */
1141
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1142
            break;
1143
        case 0x48:
1144
            /* EQV */
1145
            if (likely(rc != 31)) {
1146
                if (ra != 31) {
1147
                    if (islit)
1148
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1149
                    else
1150
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1151
                } else {
1152
                    if (islit)
1153
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1154
                    else
1155
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1156
                }
1157
            }
1158
            break;
1159
        case 0x61:
1160
            /* AMASK */
1161
            if (likely(rc != 31)) {
1162
                if (islit)
1163
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
1164
                else
1165
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1166
                switch (ctx->env->implver) {
1167
                case IMPLVER_2106x:
1168
                    /* EV4, EV45, LCA, LCA45 & EV5 */
1169
                    break;
1170
                case IMPLVER_21164:
1171
                case IMPLVER_21264:
1172
                case IMPLVER_21364:
1173
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
1174
                                     ~(uint64_t)ctx->amask);
1175
                    break;
1176
                }
1177
            }
1178
            break;
1179
        case 0x64:
1180
            /* CMOVLE */
1181
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1182
            break;
1183
        case 0x66:
1184
            /* CMOVGT */
1185
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1186
            break;
1187
        case 0x6C:
1188
            /* IMPLVER */
1189
            if (rc != 31)
1190
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
1191
            break;
1192
        default:
1193
            goto invalid_opc;
1194
        }
1195
        break;
1196
    case 0x12:
1197
        switch (fn7) {
1198
        case 0x02:
1199
            /* MSKBL */
1200
            gen_mskbl(ra, rb, rc, islit, lit);
1201
            break;
1202
        case 0x06:
1203
            /* EXTBL */
1204
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1205
            break;
1206
        case 0x0B:
1207
            /* INSBL */
1208
            gen_insbl(ra, rb, rc, islit, lit);
1209
            break;
1210
        case 0x12:
1211
            /* MSKWL */
1212
            gen_mskwl(ra, rb, rc, islit, lit);
1213
            break;
1214
        case 0x16:
1215
            /* EXTWL */
1216
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1217
            break;
1218
        case 0x1B:
1219
            /* INSWL */
1220
            gen_inswl(ra, rb, rc, islit, lit);
1221
            break;
1222
        case 0x22:
1223
            /* MSKLL */
1224
            gen_mskll(ra, rb, rc, islit, lit);
1225
            break;
1226
        case 0x26:
1227
            /* EXTLL */
1228
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1229
            break;
1230
        case 0x2B:
1231
            /* INSLL */
1232
            gen_insll(ra, rb, rc, islit, lit);
1233
            break;
1234
        case 0x30:
1235
            /* ZAP */
1236
            gen_zap(ra, rb, rc, islit, lit);
1237
            break;
1238
        case 0x31:
1239
            /* ZAPNOT */
1240
            gen_zapnot(ra, rb, rc, islit, lit);
1241
            break;
1242
        case 0x32:
1243
            /* MSKQL */
1244
            gen_mskql(ra, rb, rc, islit, lit);
1245
            break;
1246
        case 0x34:
1247
            /* SRL */
1248
            if (likely(rc != 31)) {
1249
                if (ra != 31) {
1250
                    if (islit)
1251
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1252
                    else {
1253
                        TCGv shift = tcg_temp_new();
1254
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1255
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1256
                        tcg_temp_free(shift);
1257
                    }
1258
                } else
1259
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1260
            }
1261
            break;
1262
        case 0x36:
1263
            /* EXTQL */
1264
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
1265
            break;
1266
        case 0x39:
1267
            /* SLL */
1268
            if (likely(rc != 31)) {
1269
                if (ra != 31) {
1270
                    if (islit)
1271
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1272
                    else {
1273
                        TCGv shift = tcg_temp_new();
1274
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1275
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1276
                        tcg_temp_free(shift);
1277
                    }
1278
                } else
1279
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1280
            }
1281
            break;
1282
        case 0x3B:
1283
            /* INSQL */
1284
            gen_insql(ra, rb, rc, islit, lit);
1285
            break;
1286
        case 0x3C:
1287
            /* SRA */
1288
            if (likely(rc != 31)) {
1289
                if (ra != 31) {
1290
                    if (islit)
1291
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1292
                    else {
1293
                        TCGv shift = tcg_temp_new();
1294
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1295
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1296
                        tcg_temp_free(shift);
1297
                    }
1298
                } else
1299
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1300
            }
1301
            break;
1302
        case 0x52:
1303
            /* MSKWH */
1304
            gen_mskwh(ra, rb, rc, islit, lit);
1305
            break;
1306
        case 0x57:
1307
            /* INSWH */
1308
            gen_inswh(ra, rb, rc, islit, lit);
1309
            break;
1310
        case 0x5A:
1311
            /* EXTWH */
1312
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1313
            break;
1314
        case 0x62:
1315
            /* MSKLH */
1316
            gen_msklh(ra, rb, rc, islit, lit);
1317
            break;
1318
        case 0x67:
1319
            /* INSLH */
1320
            gen_inslh(ra, rb, rc, islit, lit);
1321
            break;
1322
        case 0x6A:
1323
            /* EXTLH */
1324
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1325
            break;
1326
        case 0x72:
1327
            /* MSKQH */
1328
            gen_mskqh(ra, rb, rc, islit, lit);
1329
            break;
1330
        case 0x77:
1331
            /* INSQH */
1332
            gen_insqh(ra, rb, rc, islit, lit);
1333
            break;
1334
        case 0x7A:
1335
            /* EXTQH */
1336
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
1337
            break;
1338
        default:
1339
            goto invalid_opc;
1340
        }
1341
        break;
1342
    case 0x13:
1343
        switch (fn7) {
1344
        case 0x00:
1345
            /* MULL */
1346
            if (likely(rc != 31)) {
1347
                if (ra == 31)
1348
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1349
                else {
1350
                    if (islit)
1351
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1352
                    else
1353
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1354
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1355
                }
1356
            }
1357
            break;
1358
        case 0x20:
1359
            /* MULQ */
1360
            if (likely(rc != 31)) {
1361
                if (ra == 31)
1362
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1363
                else if (islit)
1364
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1365
                else
1366
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1367
            }
1368
            break;
1369
        case 0x30:
1370
            /* UMULH */
1371
            gen_umulh(ra, rb, rc, islit, lit);
1372
            break;
1373
        case 0x40:
1374
            /* MULL/V */
1375
            gen_mullv(ra, rb, rc, islit, lit);
1376
            break;
1377
        case 0x60:
1378
            /* MULQ/V */
1379
            gen_mulqv(ra, rb, rc, islit, lit);
1380
            break;
1381
        default:
1382
            goto invalid_opc;
1383
        }
1384
        break;
1385
    case 0x14:
1386
        switch (fpfn) { /* fn11 & 0x3F */
1387
        case 0x04:
1388
            /* ITOFS */
1389
            if (!(ctx->amask & AMASK_FIX))
1390
                goto invalid_opc;
1391
            if (likely(rc != 31)) {
1392
                if (ra != 31) {
1393
                    TCGv_i32 tmp = tcg_temp_new_i32();
1394
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1395
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1396
                    tcg_temp_free_i32(tmp);
1397
                } else
1398
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1399
            }
1400
            break;
1401
        case 0x0A:
1402
            /* SQRTF */
1403
            if (!(ctx->amask & AMASK_FIX))
1404
                goto invalid_opc;
1405
            gen_fsqrtf(rb, rc);
1406
            break;
1407
        case 0x0B:
1408
            /* SQRTS */
1409
            if (!(ctx->amask & AMASK_FIX))
1410
                goto invalid_opc;
1411
            gen_fsqrts(rb, rc);
1412
            break;
1413
        case 0x14:
1414
            /* ITOFF */
1415
            if (!(ctx->amask & AMASK_FIX))
1416
                goto invalid_opc;
1417
            if (likely(rc != 31)) {
1418
                if (ra != 31) {
1419
                    TCGv_i32 tmp = tcg_temp_new_i32();
1420
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1421
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1422
                    tcg_temp_free_i32(tmp);
1423
                } else
1424
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1425
            }
1426
            break;
1427
        case 0x24:
1428
            /* ITOFT */
1429
            if (!(ctx->amask & AMASK_FIX))
1430
                goto invalid_opc;
1431
            if (likely(rc != 31)) {
1432
                if (ra != 31)
1433
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1434
                else
1435
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1436
            }
1437
            break;
1438
        case 0x2A:
1439
            /* SQRTG */
1440
            if (!(ctx->amask & AMASK_FIX))
1441
                goto invalid_opc;
1442
            gen_fsqrtg(rb, rc);
1443
            break;
1444
        case 0x2B:
1445
            /* SQRTT */
1446
            if (!(ctx->amask & AMASK_FIX))
1447
                goto invalid_opc;
1448
            gen_fsqrtt(rb, rc);
1449
            break;
1450
        default:
1451
            goto invalid_opc;
1452
        }
1453
        break;
1454
    case 0x15:
1455
        /* VAX floating point */
1456
        /* XXX: rounding mode and trap are ignored (!) */
1457
        switch (fpfn) { /* fn11 & 0x3F */
1458
        case 0x00:
1459
            /* ADDF */
1460
            gen_faddf(ra, rb, rc);
1461
            break;
1462
        case 0x01:
1463
            /* SUBF */
1464
            gen_fsubf(ra, rb, rc);
1465
            break;
1466
        case 0x02:
1467
            /* MULF */
1468
            gen_fmulf(ra, rb, rc);
1469
            break;
1470
        case 0x03:
1471
            /* DIVF */
1472
            gen_fdivf(ra, rb, rc);
1473
            break;
1474
        case 0x1E:
1475
            /* CVTDG */
1476
#if 0 // TODO
1477
            gen_fcvtdg(rb, rc);
1478
#else
1479
            goto invalid_opc;
1480
#endif
1481
            break;
1482
        case 0x20:
1483
            /* ADDG */
1484
            gen_faddg(ra, rb, rc);
1485
            break;
1486
        case 0x21:
1487
            /* SUBG */
1488
            gen_fsubg(ra, rb, rc);
1489
            break;
1490
        case 0x22:
1491
            /* MULG */
1492
            gen_fmulg(ra, rb, rc);
1493
            break;
1494
        case 0x23:
1495
            /* DIVG */
1496
            gen_fdivg(ra, rb, rc);
1497
            break;
1498
        case 0x25:
1499
            /* CMPGEQ */
1500
            gen_fcmpgeq(ra, rb, rc);
1501
            break;
1502
        case 0x26:
1503
            /* CMPGLT */
1504
            gen_fcmpglt(ra, rb, rc);
1505
            break;
1506
        case 0x27:
1507
            /* CMPGLE */
1508
            gen_fcmpgle(ra, rb, rc);
1509
            break;
1510
        case 0x2C:
1511
            /* CVTGF */
1512
            gen_fcvtgf(rb, rc);
1513
            break;
1514
        case 0x2D:
1515
            /* CVTGD */
1516
#if 0 // TODO
1517
            gen_fcvtgd(rb, rc);
1518
#else
1519
            goto invalid_opc;
1520
#endif
1521
            break;
1522
        case 0x2F:
1523
            /* CVTGQ */
1524
            gen_fcvtgq(rb, rc);
1525
            break;
1526
        case 0x3C:
1527
            /* CVTQF */
1528
            gen_fcvtqf(rb, rc);
1529
            break;
1530
        case 0x3E:
1531
            /* CVTQG */
1532
            gen_fcvtqg(rb, rc);
1533
            break;
1534
        default:
1535
            goto invalid_opc;
1536
        }
1537
        break;
1538
    case 0x16:
1539
        /* IEEE floating-point */
1540
        /* XXX: rounding mode and traps are ignored (!) */
1541
        switch (fpfn) { /* fn11 & 0x3F */
1542
        case 0x00:
1543
            /* ADDS */
1544
            gen_fadds(ra, rb, rc);
1545
            break;
1546
        case 0x01:
1547
            /* SUBS */
1548
            gen_fsubs(ra, rb, rc);
1549
            break;
1550
        case 0x02:
1551
            /* MULS */
1552
            gen_fmuls(ra, rb, rc);
1553
            break;
1554
        case 0x03:
1555
            /* DIVS */
1556
            gen_fdivs(ra, rb, rc);
1557
            break;
1558
        case 0x20:
1559
            /* ADDT */
1560
            gen_faddt(ra, rb, rc);
1561
            break;
1562
        case 0x21:
1563
            /* SUBT */
1564
            gen_fsubt(ra, rb, rc);
1565
            break;
1566
        case 0x22:
1567
            /* MULT */
1568
            gen_fmult(ra, rb, rc);
1569
            break;
1570
        case 0x23:
1571
            /* DIVT */
1572
            gen_fdivt(ra, rb, rc);
1573
            break;
1574
        case 0x24:
1575
            /* CMPTUN */
1576
            gen_fcmptun(ra, rb, rc);
1577
            break;
1578
        case 0x25:
1579
            /* CMPTEQ */
1580
            gen_fcmpteq(ra, rb, rc);
1581
            break;
1582
        case 0x26:
1583
            /* CMPTLT */
1584
            gen_fcmptlt(ra, rb, rc);
1585
            break;
1586
        case 0x27:
1587
            /* CMPTLE */
1588
            gen_fcmptle(ra, rb, rc);
1589
            break;
1590
        case 0x2C:
1591
            /* XXX: incorrect */
1592
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
1593
                /* CVTST */
1594
                gen_fcvtst(rb, rc);
1595
            } else {
1596
                /* CVTTS */
1597
                gen_fcvtts(rb, rc);
1598
            }
1599
            break;
1600
        case 0x2F:
1601
            /* CVTTQ */
1602
            gen_fcvttq(rb, rc);
1603
            break;
1604
        case 0x3C:
1605
            /* CVTQS */
1606
            gen_fcvtqs(rb, rc);
1607
            break;
1608
        case 0x3E:
1609
            /* CVTQT */
1610
            gen_fcvtqt(rb, rc);
1611
            break;
1612
        default:
1613
            goto invalid_opc;
1614
        }
1615
        break;
1616
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
#if 0
            ret = 2;
#else
            goto invalid_opc;
#endif
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
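        /* JMP, JSR, RET, JSR_COROUTINE: the target address comes from Rb
           (with the two low bits cleared) and the return address is
           written to Ra. */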
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;
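            /* The shift pair below sign-extends the low 13 bits of the
               instruction (the displacement field) before adding it to Rb. */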

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
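        /* XXX: FBcc is a branch-format insn with a 21-bit displacement;
           passing disp16 to gen_fbcond here looks suspicious. */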
        gen_fbcond(ctx, opc, ra, disp16);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp16);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}

static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
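    /* This implementation tracks PALmode in bit 0 of the EXC_ADDR IPR. */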
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}