/*
2
 *  Alpha emulation cpu translation for qemu.
3
 *
4
 *  Copyright (c) 2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
19
 */
20

    
21
#include <stdint.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24

    
25
#include "cpu.h"
26
#include "exec-all.h"
27
#include "disas.h"
28
#include "host-utils.h"
29
#include "tcg-op.h"
30
#include "qemu-common.h"
31

    
32
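/* helper.h is included twice: the first pass declares the helper
   prototypes, the second (with GEN_HELPER defined) generates the
   gen_helper_* call wrappers used by the translator below.  */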
#include "helper.h"
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
/* #define DO_SINGLE_STEP */
37
#define ALPHA_DEBUG_DISAS
38
/* #define DO_TB_FLUSH */
39

    
40

    
41
#ifdef ALPHA_DEBUG_DISAS
42
#  define LOG_DISAS(...) qemu_log(__VA_ARGS__)
43
#else
44
#  define LOG_DISAS(...) do { } while (0)
45
#endif
46

    
47
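/* Per-translation-block state: the current guest PC, the memory index
   used for loads/stores, the PALmode flag (system emulation only), a
   pointer back to the CPU state, and the AMASK bits that gate optional
   instruction subsets such as BWX and FIX.  */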
typedef struct DisasContext DisasContext;
48
struct DisasContext {
49
    uint64_t pc;
50
    int mem_idx;
51
#if !defined (CONFIG_USER_ONLY)
52
    int pal_mode;
53
#endif
54
    CPUAlphaState *env;
55
    uint32_t amask;
56
};
57

    
58
/* global register indexes */
59
static TCGv_ptr cpu_env;
60
static TCGv cpu_ir[31];
61
static TCGv cpu_fir[31];
62
static TCGv cpu_pc;
63
static TCGv cpu_lock;
64

    
65
/* register names */
66
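/* Storage for the generated names "ir0".."ir30" (4 or 5 bytes each,
   including the NUL) and "fir0".."fir30" (5 or 6 bytes each).  */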
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
67

    
68
#include "gen-icount.h"
69

    
70
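/* Register the TCG globals that back the architectural state: the 31
   integer and 31 FP registers, the PC and the lock address used by
   LDx_L/STx_C.  done_init ensures this only happens once.  */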
static void alpha_translate_init(void)
71
{
72
    int i;
73
    char *p;
74
    static int done_init = 0;
75

    
76
    if (done_init)
77
        return;
78

    
79
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
80

    
81
    p = cpu_reg_names;
82
    for (i = 0; i < 31; i++) {
83
        sprintf(p, "ir%d", i);
84
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
85
                                           offsetof(CPUState, ir[i]), p);
86
        p += (i < 10) ? 4 : 5;
87

    
88
        sprintf(p, "fir%d", i);
89
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
90
                                            offsetof(CPUState, fir[i]), p);
91
        p += (i < 10) ? 5 : 6;
92
    }
93

    
94
    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
95
                                    offsetof(CPUState, pc), "pc");
96

    
97
    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
98
                                      offsetof(CPUState, lock), "lock");
99

    
100
    /* register helpers */
101
#define GEN_HELPER 2
102
#include "helper.h"
103

    
104
    done_init = 1;
105
}
106

    
107
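/* Raise an exception: synchronize cpu_pc with the translation PC, then
   call the excp helper with the exception index and error code.  */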
static always_inline void gen_excp (DisasContext *ctx,
108
                                    int exception, int error_code)
109
{
110
    TCGv_i32 tmp1, tmp2;
111

    
112
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
113
    tmp1 = tcg_const_i32(exception);
114
    tmp2 = tcg_const_i32(error_code);
115
    gen_helper_excp(tmp1, tmp2);
116
    tcg_temp_free_i32(tmp2);
117
    tcg_temp_free_i32(tmp1);
118
}
119

    
120
static always_inline void gen_invalid (DisasContext *ctx)
121
{
122
    gen_excp(ctx, EXCP_OPCDEC, 0);
123
}
124

    
125
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
126
{
127
    TCGv tmp = tcg_temp_new();
128
    TCGv_i32 tmp32 = tcg_temp_new_i32();
129
    tcg_gen_qemu_ld32u(tmp, t1, flags);
130
    tcg_gen_trunc_i64_i32(tmp32, tmp);
131
    gen_helper_memory_to_f(t0, tmp32);
132
    tcg_temp_free_i32(tmp32);
133
    tcg_temp_free(tmp);
134
}
135

    
136
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
137
{
138
    TCGv tmp = tcg_temp_new();
139
    tcg_gen_qemu_ld64(tmp, t1, flags);
140
    gen_helper_memory_to_g(t0, tmp);
141
    tcg_temp_free(tmp);
142
}
143

    
144
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
145
{
146
    TCGv tmp = tcg_temp_new();
147
    TCGv_i32 tmp32 = tcg_temp_new_i32();
148
    tcg_gen_qemu_ld32u(tmp, t1, flags);
149
    tcg_gen_trunc_i64_i32(tmp32, tmp);
150
    gen_helper_memory_to_s(t0, tmp32);
151
    tcg_temp_free_i32(tmp32);
152
    tcg_temp_free(tmp);
153
}
154

    
155
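/* Load-locked (LDL_L/LDQ_L): record the locked address in cpu_lock so
   that a later store-conditional can test it.  */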
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
156
{
157
    tcg_gen_mov_i64(cpu_lock, t1);
158
    tcg_gen_qemu_ld32s(t0, t1, flags);
159
}
160

    
161
static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
162
{
163
    tcg_gen_mov_i64(cpu_lock, t1);
164
    tcg_gen_qemu_ld64(t0, t1, flags);
165
}
166

    
167
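/* Common load path: the effective address is rb + disp16 (disp16 alone
   when rb is $31); 'clear' masks the low three bits for the unaligned
   LDQ_U form, 'fp' selects an FP destination, and a load into $31 is
   treated as a no-op.  */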
static always_inline void gen_load_mem (DisasContext *ctx,
168
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
169
                                        int ra, int rb, int32_t disp16,
170
                                        int fp, int clear)
171
{
172
    TCGv addr;
173

    
174
    if (unlikely(ra == 31))
175
        return;
176

    
177
    addr = tcg_temp_new();
178
    if (rb != 31) {
179
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
180
        if (clear)
181
            tcg_gen_andi_i64(addr, addr, ~0x7);
182
    } else {
183
        if (clear)
184
            disp16 &= ~0x7;
185
        tcg_gen_movi_i64(addr, disp16);
186
    }
187
    if (fp)
188
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
189
    else
190
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
191
    tcg_temp_free(addr);
192
}
193

    
194
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
195
{
196
    TCGv_i32 tmp32 = tcg_temp_new_i32();
197
    TCGv tmp = tcg_temp_new();
198
    gen_helper_f_to_memory(tmp32, t0);
199
    tcg_gen_extu_i32_i64(tmp, tmp32);
200
    tcg_gen_qemu_st32(tmp, t1, flags);
201
    tcg_temp_free(tmp);
202
    tcg_temp_free_i32(tmp32);
203
}
204

    
205
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
206
{
207
    TCGv tmp = tcg_temp_new();
208
    gen_helper_g_to_memory(tmp, t0);
209
    tcg_gen_qemu_st64(tmp, t1, flags);
210
    tcg_temp_free(tmp);
211
}
212

    
213
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
214
{
215
    TCGv_i32 tmp32 = tcg_temp_new_i32();
216
    TCGv tmp = tcg_temp_new();
217
    gen_helper_s_to_memory(tmp32, t0);
218
    tcg_gen_extu_i32_i64(tmp, tmp32);
219
    tcg_gen_qemu_st32(tmp, t1, flags);
220
    tcg_temp_free(tmp);
221
    tcg_temp_free_i32(tmp32);
222
}
223

    
224
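/* Store-conditional (STL_C/STQ_C): the store is only performed when
   the address still matches cpu_lock; t0 is set to 1 on success and 0
   on failure, and the lock is invalidated either way.  */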
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
225
{
226
    int l1, l2;
227

    
228
    l1 = gen_new_label();
229
    l2 = gen_new_label();
230
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
231
    tcg_gen_qemu_st32(t0, t1, flags);
232
    tcg_gen_movi_i64(t0, 1);
233
    tcg_gen_br(l2);
234
    gen_set_label(l1);
235
    tcg_gen_movi_i64(t0, 0);
236
    gen_set_label(l2);
237
    tcg_gen_movi_i64(cpu_lock, -1);
238
}
239

    
240
static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
241
{
242
    int l1, l2;
243

    
244
    l1 = gen_new_label();
245
    l2 = gen_new_label();
246
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
247
    tcg_gen_qemu_st64(t0, t1, flags);
248
    tcg_gen_movi_i64(t0, 1);
249
    tcg_gen_br(l2);
250
    gen_set_label(l1);
251
    tcg_gen_movi_i64(t0, 0);
252
    gen_set_label(l2);
253
    tcg_gen_movi_i64(cpu_lock, -1);
254
}
255

    
256
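/* Common store path: same addressing as gen_load_mem; a store from $31
   writes an explicit zero constant, and 'local' requests local TCG
   temporaries for callers whose store sequence contains branches.  */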
static always_inline void gen_store_mem (DisasContext *ctx,
257
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
258
                                         int ra, int rb, int32_t disp16,
259
                                         int fp, int clear, int local)
260
{
261
    TCGv addr;
262
    if (local)
263
        addr = tcg_temp_local_new();
264
    else
265
        addr = tcg_temp_new();
266
    if (rb != 31) {
267
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
268
        if (clear)
269
            tcg_gen_andi_i64(addr, addr, ~0x7);
270
    } else {
271
        if (clear)
272
            disp16 &= ~0x7;
273
        tcg_gen_movi_i64(addr, disp16);
274
    }
275
    if (ra != 31) {
276
        if (fp)
277
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
278
        else
279
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
280
    } else {
281
        TCGv zero;
282
        if (local)
283
            zero = tcg_const_local_i64(0);
284
        else
285
            zero = tcg_const_i64(0);
286
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
287
        tcg_temp_free(zero);
288
    }
289
    tcg_temp_free(addr);
290
}
291

    
292
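/* Conditional branch: test ra (or only its low bit when 'mask' is set,
   as for BLBC/BLBS) against zero, then set cpu_pc to either the
   fall-through address or PC + 4 * disp.  */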
static always_inline void gen_bcond (DisasContext *ctx,
293
                                     TCGCond cond,
294
                                     int ra, int32_t disp, int mask)
295
{
296
    int l1, l2;
297

    
298
    l1 = gen_new_label();
299
    l2 = gen_new_label();
300
    if (likely(ra != 31)) {
301
        if (mask) {
302
            TCGv tmp = tcg_temp_new();
303
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
304
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
305
            tcg_temp_free(tmp);
306
        } else
307
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
308
    } else {
309
        /* Very uncommon case - Do not bother to optimize.  */
310
        TCGv tmp = tcg_const_i64(0);
311
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
312
        tcg_temp_free(tmp);
313
    }
314
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
315
    tcg_gen_br(l2);
316
    gen_set_label(l1);
317
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
318
    gen_set_label(l2);
319
}
320

    
321
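/* Floating-point conditional branch: run the comparison helper
   selected by the opcode on fa and branch on a non-zero result.  */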
static always_inline void gen_fbcond (DisasContext *ctx, int opc,
322
                                      int ra, int32_t disp16)
323
{
324
    int l1, l2;
325
    TCGv tmp;
326
    TCGv src;
327

    
328
    l1 = gen_new_label();
329
    l2 = gen_new_label();
330
    if (ra != 31) {
331
        tmp = tcg_temp_new();
332
        src = cpu_fir[ra];
333
    } else  {
334
        tmp = tcg_const_i64(0);
335
        src = tmp;
336
    }
337
    switch (opc) {
338
    case 0x31: /* FBEQ */
339
        gen_helper_cmpfeq(tmp, src);
340
        break;
341
    case 0x32: /* FBLT */
342
        gen_helper_cmpflt(tmp, src);
343
        break;
344
    case 0x33: /* FBLE */
345
        gen_helper_cmpfle(tmp, src);
346
        break;
347
    case 0x35: /* FBNE */
348
        gen_helper_cmpfne(tmp, src);
349
        break;
350
    case 0x36: /* FBGE */
351
        gen_helper_cmpfge(tmp, src);
352
        break;
353
    case 0x37: /* FBGT */
354
        gen_helper_cmpfgt(tmp, src);
355
        break;
356
    default:
357
        abort();
358
    }
359
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
360
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
361
    tcg_gen_br(l2);
362
    gen_set_label(l1);
363
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
364
    gen_set_label(l2);
365
}
366

    
367
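/* Integer conditional move: branch around the move using the inverted
   condition, so rc is only written when the original condition on ra
   holds.  */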
static always_inline void gen_cmov (TCGCond inv_cond,
368
                                    int ra, int rb, int rc,
369
                                    int islit, uint8_t lit, int mask)
370
{
371
    int l1;
372

    
373
    if (unlikely(rc == 31))
374
        return;
375

    
376
    l1 = gen_new_label();
377

    
378
    if (ra != 31) {
379
        if (mask) {
380
            TCGv tmp = tcg_temp_new();
381
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
382
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
383
            tcg_temp_free(tmp);
384
        } else
385
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
386
    } else {
387
        /* Very uncommon case - Do not bother to optimize.  */
388
        TCGv tmp = tcg_const_i64(0);
389
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
390
        tcg_temp_free(tmp);
391
    }
392

    
393
    if (islit)
394
        tcg_gen_movi_i64(cpu_ir[rc], lit);
395
    else
396
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
397
    gen_set_label(l1);
398
}
399

    
400
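/* Two-operand FP helper call (rb -> rc): $f31 as a source reads as
   zero, and results destined for $f31 are not generated at all.  */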
#define FARITH2(name)                                       \
401
static always_inline void glue(gen_f, name)(int rb, int rc) \
402
{                                                           \
403
    if (unlikely(rc == 31))                                 \
404
      return;                                               \
405
                                                            \
406
    if (rb != 31)                                           \
407
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);    \
408
    else {                                                  \
409
        TCGv tmp = tcg_const_i64(0);                        \
410
        gen_helper_ ## name (cpu_fir[rc], tmp);            \
411
        tcg_temp_free(tmp);                                 \
412
    }                                                       \
413
}
414
FARITH2(sqrts)
415
FARITH2(sqrtf)
416
FARITH2(sqrtg)
417
FARITH2(sqrtt)
418
FARITH2(cvtgf)
419
FARITH2(cvtgq)
420
FARITH2(cvtqf)
421
FARITH2(cvtqg)
422
FARITH2(cvtst)
423
FARITH2(cvtts)
424
FARITH2(cvttq)
425
FARITH2(cvtqs)
426
FARITH2(cvtqt)
427
FARITH2(cvtlq)
428
FARITH2(cvtql)
429
FARITH2(cvtqlv)
430
FARITH2(cvtqlsv)
431

    
432
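/* Three-operand FP helper call (ra, rb -> rc), again substituting a
   zero constant for $f31 sources and discarding writes to $f31.  */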
#define FARITH3(name)                                                     \
433
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)      \
434
{                                                                         \
435
    if (unlikely(rc == 31))                                               \
436
        return;                                                           \
437
                                                                          \
438
    if (ra != 31) {                                                       \
439
        if (rb != 31)                                                     \
440
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
441
        else {                                                            \
442
            TCGv tmp = tcg_const_i64(0);                                  \
443
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
444
            tcg_temp_free(tmp);                                           \
445
        }                                                                 \
446
    } else {                                                              \
447
        TCGv tmp = tcg_const_i64(0);                                      \
448
        if (rb != 31)                                                     \
449
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
450
        else                                                              \
451
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                   \
452
        tcg_temp_free(tmp);                                               \
453
    }                                                                     \
454
}
455

    
456
FARITH3(addf)
457
FARITH3(subf)
458
FARITH3(mulf)
459
FARITH3(divf)
460
FARITH3(addg)
461
FARITH3(subg)
462
FARITH3(mulg)
463
FARITH3(divg)
464
FARITH3(cmpgeq)
465
FARITH3(cmpglt)
466
FARITH3(cmpgle)
467
FARITH3(adds)
468
FARITH3(subs)
469
FARITH3(muls)
470
FARITH3(divs)
471
FARITH3(addt)
472
FARITH3(subt)
473
FARITH3(mult)
474
FARITH3(divt)
475
FARITH3(cmptun)
476
FARITH3(cmpteq)
477
FARITH3(cmptlt)
478
FARITH3(cmptle)
479
FARITH3(cpys)
480
FARITH3(cpysn)
481
FARITH3(cpyse)
482

    
483
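/* FCMOVxx: evaluate the comparison helper on fa and, when the result
   is non-zero, copy fb (or zero for $f31) into fc.  */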
#define FCMOV(name)                                                   \
484
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)  \
485
{                                                                     \
486
    int l1;                                                           \
487
    TCGv tmp;                                                         \
488
                                                                      \
489
    if (unlikely(rc == 31))                                           \
490
        return;                                                       \
491
                                                                      \
492
    l1 = gen_new_label();                                             \
    if (ra != 31) {                                                   \
        tmp = tcg_temp_new();                                         \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
    } else  {                                                         \
        tmp = tcg_const_i64(0);                                       \
        gen_helper_ ## name (tmp, tmp);                               \
    }                                                                 \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
    if (rb != 31)                                                     \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                    \
504
    else                                                              \
505
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
506
    gen_set_label(l1);                                                \
507
}
508
FCMOV(cmpfeq)
509
FCMOV(cmpfne)
510
FCMOV(cmpflt)
511
FCMOV(cmpfge)
512
FCMOV(cmpfle)
513
FCMOV(cmpfgt)
514

    
515
/* EXTWH, EXTLH, EXTQH */
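/* ra is shifted left by 64 - 8 * (rb & 7) bits; the W/L variants then
   zero-extend the low 16 or 32 bits of the result, while a NULL
   extension function means EXTQH and keeps the whole quadword.  */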
516
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
517
                                    int ra, int rb, int rc,
518
                                    int islit, uint8_t lit)
519
{
520
    if (unlikely(rc == 31))
521
        return;
522

    
523
    if (ra != 31) {
524
        if (islit) {
525
            if (lit != 0)
526
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
527
            else
528
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
529
        } else {
530
            TCGv tmp1, tmp2;
531
            tmp1 = tcg_temp_new();
532
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
533
            tcg_gen_shli_i64(tmp1, tmp1, 3);
534
            tmp2 = tcg_const_i64(64);
535
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
536
            tcg_temp_free(tmp2);
537
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
538
            tcg_temp_free(tmp1);
539
        }
540
        if (tcg_gen_ext_i64)
541
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
542
    } else
543
        tcg_gen_movi_i64(cpu_ir[rc], 0);
544
}
545

    
546
/* EXTBL, EXTWL, EXTLL, EXTQL */
547
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
548
                                    int ra, int rb, int rc,
549
                                    int islit, uint8_t lit)
550
{
551
    if (unlikely(rc == 31))
552
        return;
553

    
554
    if (ra != 31) {
555
        if (islit) {
556
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
557
        } else {
558
            TCGv tmp = tcg_temp_new();
559
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
560
            tcg_gen_shli_i64(tmp, tmp, 3);
561
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
562
            tcg_temp_free(tmp);
563
        }
564
        if (tcg_gen_ext_i64)
565
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
566
    } else
567
        tcg_gen_movi_i64(cpu_ir[rc], 0);
568
}
569

    
570
/* Code to call arith3 helpers */
571
#define ARITH3(name)                                                  \
572
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
573
                                            int islit, uint8_t lit)   \
574
{                                                                     \
575
    if (unlikely(rc == 31))                                           \
576
        return;                                                       \
577
                                                                      \
578
    if (ra != 31) {                                                   \
579
        if (islit) {                                                  \
580
            TCGv tmp = tcg_const_i64(lit);                            \
581
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
582
            tcg_temp_free(tmp);                                       \
583
        } else                                                        \
584
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
585
    } else {                                                          \
586
        TCGv tmp1 = tcg_const_i64(0);                                 \
587
        if (islit) {                                                  \
588
            TCGv tmp2 = tcg_const_i64(lit);                           \
589
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
590
            tcg_temp_free(tmp2);                                      \
591
        } else                                                        \
592
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
593
        tcg_temp_free(tmp1);                                          \
594
    }                                                                 \
595
}
596
ARITH3(cmpbge)
597
ARITH3(addlv)
598
ARITH3(sublv)
599
ARITH3(addqv)
600
ARITH3(subqv)
601
ARITH3(mskbl)
602
ARITH3(insbl)
603
ARITH3(mskwl)
604
ARITH3(inswl)
605
ARITH3(mskll)
606
ARITH3(insll)
607
ARITH3(zap)
608
ARITH3(zapnot)
609
ARITH3(mskql)
610
ARITH3(insql)
611
ARITH3(mskwh)
612
ARITH3(inswh)
613
ARITH3(msklh)
614
ARITH3(inslh)
615
ARITH3(mskqh)
616
ARITH3(insqh)
617
ARITH3(umulh)
618
ARITH3(mullv)
619
ARITH3(mulqv)
620

    
621
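/* Integer compare: set rc to 1 when the comparison between ra (zero
   for $31) and rb or the literal holds, otherwise to 0.  */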
static always_inline void gen_cmp(TCGCond cond,
622
                                  int ra, int rb, int rc,
623
                                  int islit, uint8_t lit)
624
{
625
    int l1, l2;
626
    TCGv tmp;
627

    
628
    if (unlikely(rc == 31))
629
        return;
630

    
631
    l1 = gen_new_label();
632
    l2 = gen_new_label();
633

    
634
    if (ra != 31) {
635
        tmp = tcg_temp_new();
636
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
637
    } else
638
        tmp = tcg_const_i64(0);
639
    if (islit)
640
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
641
    else
642
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
643

    
644
    tcg_gen_movi_i64(cpu_ir[rc], 0);
645
    tcg_gen_br(l2);
646
    gen_set_label(l1);
647
    tcg_gen_movi_i64(cpu_ir[rc], 1);
648
    gen_set_label(l2);
649
}
650

    
651
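/* Decode and translate a single instruction.  Returns 0 when the
   translation block may continue, or a non-zero code when it must end
   (branches, PAL calls, barriers).  */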
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
652
{
653
    uint32_t palcode;
654
    int32_t disp21, disp16, disp12;
655
    uint16_t fn11, fn16;
656
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
657
    uint8_t lit;
658
    int ret;
659

    
660
    /* Decode all instruction fields */
661
    opc = insn >> 26;
662
    ra = (insn >> 21) & 0x1F;
663
    rb = (insn >> 16) & 0x1F;
664
    rc = insn & 0x1F;
665
    sbz = (insn >> 13) & 0x07;
666
    islit = (insn >> 12) & 1;
667
    if (rb == 31 && !islit) {
668
        islit = 1;
669
        lit = 0;
670
    } else
671
        lit = (insn >> 13) & 0xFF;
672
    palcode = insn & 0x03FFFFFF;
673
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
674
    disp16 = (int16_t)(insn & 0x0000FFFF);
675
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
676
    fn16 = insn & 0x0000FFFF;
677
    fn11 = (insn >> 5) & 0x000007FF;
678
    fpfn = fn11 & 0x3F;
679
    fn7 = (insn >> 5) & 0x0000007F;
680
    fn2 = (insn >> 5) & 0x00000003;
681
    ret = 0;
682
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
683
              opc, ra, rb, rc, disp16);
684
    switch (opc) {
685
    case 0x00:
686
        /* CALL_PAL */
687
        if (palcode >= 0x80 && palcode < 0xC0) {
688
            /* Unprivileged PAL call */
689
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
690
#if !defined (CONFIG_USER_ONLY)
691
        } else if (palcode < 0x40) {
692
            /* Privileged PAL code */
693
            if (ctx->mem_idx & 1)
694
                goto invalid_opc;
695
            else
696
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
697
#endif
698
        } else {
699
            /* Invalid PAL call */
700
            goto invalid_opc;
701
        }
702
        ret = 3;
703
        break;
704
    case 0x01:
705
        /* OPC01 */
706
        goto invalid_opc;
707
    case 0x02:
708
        /* OPC02 */
709
        goto invalid_opc;
710
    case 0x03:
711
        /* OPC03 */
712
        goto invalid_opc;
713
    case 0x04:
714
        /* OPC04 */
715
        goto invalid_opc;
716
    case 0x05:
717
        /* OPC05 */
718
        goto invalid_opc;
719
    case 0x06:
720
        /* OPC06 */
721
        goto invalid_opc;
722
    case 0x07:
723
        /* OPC07 */
724
        goto invalid_opc;
725
    case 0x08:
726
        /* LDA */
727
        if (likely(ra != 31)) {
728
            if (rb != 31)
729
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
730
            else
731
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
732
        }
733
        break;
734
    case 0x09:
735
        /* LDAH */
736
        if (likely(ra != 31)) {
737
            if (rb != 31)
738
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
739
            else
740
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
741
        }
742
        break;
743
    case 0x0A:
744
        /* LDBU */
745
        if (!(ctx->amask & AMASK_BWX))
746
            goto invalid_opc;
747
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
748
        break;
749
    case 0x0B:
750
        /* LDQ_U */
751
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
752
        break;
753
    case 0x0C:
754
        /* LDWU */
755
        if (!(ctx->amask & AMASK_BWX))
756
            goto invalid_opc;
757
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
758
        break;
759
    case 0x0D:
760
        /* STW */
761
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
762
        break;
763
    case 0x0E:
764
        /* STB */
765
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
766
        break;
767
    case 0x0F:
768
        /* STQ_U */
769
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
770
        break;
771
    case 0x10:
772
        switch (fn7) {
773
        case 0x00:
774
            /* ADDL */
775
            if (likely(rc != 31)) {
776
                if (ra != 31) {
777
                    if (islit) {
778
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
779
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
780
                    } else {
781
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
782
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
783
                    }
784
                } else {
785
                    if (islit)
786
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
787
                    else
788
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
789
                }
790
            }
791
            break;
792
        case 0x02:
793
            /* S4ADDL */
794
            if (likely(rc != 31)) {
795
                if (ra != 31) {
796
                    TCGv tmp = tcg_temp_new();
797
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
798
                    if (islit)
799
                        tcg_gen_addi_i64(tmp, tmp, lit);
800
                    else
801
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
802
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
803
                    tcg_temp_free(tmp);
804
                } else {
805
                    if (islit)
806
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
807
                    else
808
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
809
                }
810
            }
811
            break;
812
        case 0x09:
813
            /* SUBL */
814
            if (likely(rc != 31)) {
815
                if (ra != 31) {
816
                    if (islit)
817
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
818
                    else
819
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
820
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
821
                } else {
822
                    if (islit)
823
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
824
                    else {
825
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
826
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
827
                    }
                }
            }
829
            break;
830
        case 0x0B:
831
            /* S4SUBL */
832
            if (likely(rc != 31)) {
833
                if (ra != 31) {
834
                    TCGv tmp = tcg_temp_new();
835
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
836
                    if (islit)
837
                        tcg_gen_subi_i64(tmp, tmp, lit);
838
                    else
839
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
840
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
841
                    tcg_temp_free(tmp);
842
                } else {
843
                    if (islit)
844
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
845
                    else {
846
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
847
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
848
                    }
849
                }
850
            }
851
            break;
852
        case 0x0F:
853
            /* CMPBGE */
854
            gen_cmpbge(ra, rb, rc, islit, lit);
855
            break;
856
        case 0x12:
857
            /* S8ADDL */
858
            if (likely(rc != 31)) {
859
                if (ra != 31) {
860
                    TCGv tmp = tcg_temp_new();
861
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
862
                    if (islit)
863
                        tcg_gen_addi_i64(tmp, tmp, lit);
864
                    else
865
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
866
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
867
                    tcg_temp_free(tmp);
868
                } else {
869
                    if (islit)
870
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
871
                    else
872
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
873
                }
874
            }
875
            break;
876
        case 0x1B:
877
            /* S8SUBL */
878
            if (likely(rc != 31)) {
879
                if (ra != 31) {
880
                    TCGv tmp = tcg_temp_new();
881
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
882
                    if (islit)
883
                        tcg_gen_subi_i64(tmp, tmp, lit);
884
                    else
885
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
886
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
887
                    tcg_temp_free(tmp);
888
                } else {
889
                    if (islit)
890
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
891
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
897
            break;
898
        case 0x1D:
899
            /* CMPULT */
900
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
901
            break;
902
        case 0x20:
903
            /* ADDQ */
904
            if (likely(rc != 31)) {
905
                if (ra != 31) {
906
                    if (islit)
907
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
908
                    else
909
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
910
                } else {
911
                    if (islit)
912
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
913
                    else
914
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
915
                }
916
            }
917
            break;
918
        case 0x22:
919
            /* S4ADDQ */
920
            if (likely(rc != 31)) {
921
                if (ra != 31) {
922
                    TCGv tmp = tcg_temp_new();
923
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
924
                    if (islit)
925
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
926
                    else
927
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
928
                    tcg_temp_free(tmp);
929
                } else {
930
                    if (islit)
931
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
932
                    else
933
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
934
                }
935
            }
936
            break;
937
        case 0x29:
938
            /* SUBQ */
939
            if (likely(rc != 31)) {
940
                if (ra != 31) {
941
                    if (islit)
942
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
943
                    else
944
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
945
                } else {
946
                    if (islit)
947
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
948
                    else
949
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
950
                }
951
            }
952
            break;
953
        case 0x2B:
954
            /* S4SUBQ */
955
            if (likely(rc != 31)) {
956
                if (ra != 31) {
957
                    TCGv tmp = tcg_temp_new();
958
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
959
                    if (islit)
960
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
961
                    else
962
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
963
                    tcg_temp_free(tmp);
964
                } else {
965
                    if (islit)
966
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
967
                    else
968
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
969
                }
970
            }
971
            break;
972
        case 0x2D:
973
            /* CMPEQ */
974
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
975
            break;
976
        case 0x32:
977
            /* S8ADDQ */
978
            if (likely(rc != 31)) {
979
                if (ra != 31) {
980
                    TCGv tmp = tcg_temp_new();
981
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
982
                    if (islit)
983
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
984
                    else
985
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
986
                    tcg_temp_free(tmp);
987
                } else {
988
                    if (islit)
989
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
990
                    else
991
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
992
                }
993
            }
994
            break;
995
        case 0x3B:
996
            /* S8SUBQ */
997
            if (likely(rc != 31)) {
998
                if (ra != 31) {
999
                    TCGv tmp = tcg_temp_new();
1000
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1001
                    if (islit)
1002
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1003
                    else
1004
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1005
                    tcg_temp_free(tmp);
1006
                } else {
1007
                    if (islit)
1008
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1009
                    else
1010
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1011
                }
1012
            }
1013
            break;
1014
        case 0x3D:
1015
            /* CMPULE */
1016
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1017
            break;
1018
        case 0x40:
1019
            /* ADDL/V */
1020
            gen_addlv(ra, rb, rc, islit, lit);
1021
            break;
1022
        case 0x49:
1023
            /* SUBL/V */
1024
            gen_sublv(ra, rb, rc, islit, lit);
1025
            break;
1026
        case 0x4D:
1027
            /* CMPLT */
1028
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1029
            break;
1030
        case 0x60:
1031
            /* ADDQ/V */
1032
            gen_addqv(ra, rb, rc, islit, lit);
1033
            break;
1034
        case 0x69:
1035
            /* SUBQ/V */
1036
            gen_subqv(ra, rb, rc, islit, lit);
1037
            break;
1038
        case 0x6D:
1039
            /* CMPLE */
1040
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1041
            break;
1042
        default:
1043
            goto invalid_opc;
1044
        }
1045
        break;
1046
    case 0x11:
1047
        switch (fn7) {
1048
        case 0x00:
1049
            /* AND */
1050
            if (likely(rc != 31)) {
1051
                if (ra == 31)
1052
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1053
                else if (islit)
1054
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1055
                else
1056
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1057
            }
1058
            break;
1059
        case 0x08:
1060
            /* BIC */
1061
            if (likely(rc != 31)) {
1062
                if (ra != 31) {
1063
                    if (islit)
1064
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1065
                    else
1066
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1067
                } else
1068
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1069
            }
1070
            break;
1071
        case 0x14:
1072
            /* CMOVLBS */
1073
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1074
            break;
1075
        case 0x16:
1076
            /* CMOVLBC */
1077
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1078
            break;
1079
        case 0x20:
1080
            /* BIS */
1081
            if (likely(rc != 31)) {
1082
                if (ra != 31) {
1083
                    if (islit)
1084
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1085
                    else
1086
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1087
                } else {
1088
                    if (islit)
1089
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1090
                    else
1091
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1092
                }
1093
            }
1094
            break;
1095
        case 0x24:
1096
            /* CMOVEQ */
1097
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1098
            break;
1099
        case 0x26:
1100
            /* CMOVNE */
1101
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1102
            break;
1103
        case 0x28:
1104
            /* ORNOT */
1105
            if (likely(rc != 31)) {
1106
                if (ra != 31) {
1107
                    if (islit)
1108
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1109
                    else
1110
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1111
                } else {
1112
                    if (islit)
1113
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1114
                    else
1115
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1116
                }
1117
            }
1118
            break;
1119
        case 0x40:
1120
            /* XOR */
1121
            if (likely(rc != 31)) {
1122
                if (ra != 31) {
1123
                    if (islit)
1124
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1125
                    else
1126
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1127
                } else {
1128
                    if (islit)
1129
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1130
                    else
1131
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1132
                }
1133
            }
1134
            break;
1135
        case 0x44:
1136
            /* CMOVLT */
1137
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1138
            break;
1139
        case 0x46:
1140
            /* CMOVGE */
1141
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1142
            break;
1143
        case 0x48:
1144
            /* EQV */
1145
            if (likely(rc != 31)) {
1146
                if (ra != 31) {
1147
                    if (islit)
1148
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1149
                    else
1150
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1151
                } else {
1152
                    if (islit)
1153
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1154
                    else
1155
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1156
                }
1157
            }
1158
            break;
1159
        case 0x61:
1160
            /* AMASK */
1161
            if (likely(rc != 31)) {
1162
                if (islit)
1163
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1164
                else
1165
                    gen_helper_amask(cpu_ir[rc], cpu_ir[rb]);
1166
            }
1167
            break;
1168
        case 0x64:
1169
            /* CMOVLE */
1170
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1171
            break;
1172
        case 0x66:
1173
            /* CMOVGT */
1174
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1175
            break;
1176
        case 0x6C:
1177
            /* IMPLVER */
1178
            if (rc != 31)
1179
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
1180
            break;
1181
        default:
1182
            goto invalid_opc;
1183
        }
1184
        break;
1185
    case 0x12:
1186
        switch (fn7) {
1187
        case 0x02:
1188
            /* MSKBL */
1189
            gen_mskbl(ra, rb, rc, islit, lit);
1190
            break;
1191
        case 0x06:
1192
            /* EXTBL */
1193
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1194
            break;
1195
        case 0x0B:
1196
            /* INSBL */
1197
            gen_insbl(ra, rb, rc, islit, lit);
1198
            break;
1199
        case 0x12:
1200
            /* MSKWL */
1201
            gen_mskwl(ra, rb, rc, islit, lit);
1202
            break;
1203
        case 0x16:
1204
            /* EXTWL */
1205
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1206
            break;
1207
        case 0x1B:
1208
            /* INSWL */
1209
            gen_inswl(ra, rb, rc, islit, lit);
1210
            break;
1211
        case 0x22:
1212
            /* MSKLL */
1213
            gen_mskll(ra, rb, rc, islit, lit);
1214
            break;
1215
        case 0x26:
1216
            /* EXTLL */
1217
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1218
            break;
1219
        case 0x2B:
1220
            /* INSLL */
1221
            gen_insll(ra, rb, rc, islit, lit);
1222
            break;
1223
        case 0x30:
1224
            /* ZAP */
1225
            gen_zap(ra, rb, rc, islit, lit);
1226
            break;
1227
        case 0x31:
1228
            /* ZAPNOT */
1229
            gen_zapnot(ra, rb, rc, islit, lit);
1230
            break;
1231
        case 0x32:
1232
            /* MSKQL */
1233
            gen_mskql(ra, rb, rc, islit, lit);
1234
            break;
1235
        case 0x34:
1236
            /* SRL */
1237
            if (likely(rc != 31)) {
1238
                if (ra != 31) {
1239
                    if (islit)
1240
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1241
                    else {
1242
                        TCGv shift = tcg_temp_new();
1243
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1244
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1245
                        tcg_temp_free(shift);
1246
                    }
1247
                } else
1248
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1249
            }
1250
            break;
1251
        case 0x36:
1252
            /* EXTQL */
1253
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
1254
            break;
1255
        case 0x39:
1256
            /* SLL */
1257
            if (likely(rc != 31)) {
1258
                if (ra != 31) {
1259
                    if (islit)
1260
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1261
                    else {
1262
                        TCGv shift = tcg_temp_new();
1263
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1264
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1265
                        tcg_temp_free(shift);
1266
                    }
1267
                } else
1268
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1269
            }
1270
            break;
1271
        case 0x3B:
1272
            /* INSQL */
1273
            gen_insql(ra, rb, rc, islit, lit);
1274
            break;
1275
        case 0x3C:
1276
            /* SRA */
1277
            if (likely(rc != 31)) {
1278
                if (ra != 31) {
1279
                    if (islit)
1280
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1281
                    else {
1282
                        TCGv shift = tcg_temp_new();
1283
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1284
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1285
                        tcg_temp_free(shift);
1286
                    }
1287
                } else
1288
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1289
            }
1290
            break;
1291
        case 0x52:
1292
            /* MSKWH */
1293
            gen_mskwh(ra, rb, rc, islit, lit);
1294
            break;
1295
        case 0x57:
1296
            /* INSWH */
1297
            gen_inswh(ra, rb, rc, islit, lit);
1298
            break;
1299
        case 0x5A:
1300
            /* EXTWH */
1301
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1302
            break;
1303
        case 0x62:
1304
            /* MSKLH */
1305
            gen_msklh(ra, rb, rc, islit, lit);
1306
            break;
1307
        case 0x67:
1308
            /* INSLH */
1309
            gen_inslh(ra, rb, rc, islit, lit);
1310
            break;
1311
        case 0x6A:
1312
            /* EXTLH */
1313
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1314
            break;
1315
        case 0x72:
1316
            /* MSKQH */
1317
            gen_mskqh(ra, rb, rc, islit, lit);
1318
            break;
1319
        case 0x77:
1320
            /* INSQH */
1321
            gen_insqh(ra, rb, rc, islit, lit);
1322
            break;
1323
        case 0x7A:
1324
            /* EXTQH */
1325
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
1326
            break;
1327
        default:
1328
            goto invalid_opc;
1329
        }
1330
        break;
1331
    case 0x13:
1332
        switch (fn7) {
1333
        case 0x00:
1334
            /* MULL */
1335
            if (likely(rc != 31)) {
1336
                if (ra == 31)
1337
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1338
                else {
1339
                    if (islit)
1340
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1341
                    else
1342
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1343
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1344
                }
1345
            }
1346
            break;
1347
        case 0x20:
1348
            /* MULQ */
1349
            if (likely(rc != 31)) {
1350
                if (ra == 31)
1351
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1352
                else if (islit)
1353
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1354
                else
1355
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1356
            }
1357
            break;
1358
        case 0x30:
1359
            /* UMULH */
1360
            gen_umulh(ra, rb, rc, islit, lit);
1361
            break;
1362
        case 0x40:
1363
            /* MULL/V */
1364
            gen_mullv(ra, rb, rc, islit, lit);
1365
            break;
1366
        case 0x60:
1367
            /* MULQ/V */
1368
            gen_mulqv(ra, rb, rc, islit, lit);
1369
            break;
1370
        default:
1371
            goto invalid_opc;
1372
        }
1373
        break;
1374
    case 0x14:
1375
        switch (fpfn) { /* fn11 & 0x3F */
1376
        case 0x04:
1377
            /* ITOFS */
1378
            if (!(ctx->amask & AMASK_FIX))
1379
                goto invalid_opc;
1380
            if (likely(rc != 31)) {
1381
                if (ra != 31) {
1382
                    TCGv_i32 tmp = tcg_temp_new_i32();
1383
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1384
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1385
                    tcg_temp_free_i32(tmp);
1386
                } else
1387
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1388
            }
1389
            break;
1390
        case 0x0A:
1391
            /* SQRTF */
1392
            if (!(ctx->amask & AMASK_FIX))
1393
                goto invalid_opc;
1394
            gen_fsqrtf(rb, rc);
1395
            break;
1396
        case 0x0B:
1397
            /* SQRTS */
1398
            if (!(ctx->amask & AMASK_FIX))
1399
                goto invalid_opc;
1400
            gen_fsqrts(rb, rc);
1401
            break;
1402
        case 0x14:
1403
            /* ITOFF */
1404
            if (!(ctx->amask & AMASK_FIX))
1405
                goto invalid_opc;
1406
            if (likely(rc != 31)) {
1407
                if (ra != 31) {
1408
                    TCGv_i32 tmp = tcg_temp_new_i32();
1409
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1410
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1411
                    tcg_temp_free_i32(tmp);
1412
                } else
1413
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1414
            }
1415
            break;
1416
        case 0x24:
1417
            /* ITOFT */
1418
            if (!(ctx->amask & AMASK_FIX))
1419
                goto invalid_opc;
1420
            if (likely(rc != 31)) {
1421
                if (ra != 31)
1422
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1423
                else
1424
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1425
            }
1426
            break;
1427
        case 0x2A:
1428
            /* SQRTG */
1429
            if (!(ctx->amask & AMASK_FIX))
1430
                goto invalid_opc;
1431
            gen_fsqrtg(rb, rc);
1432
            break;
1433
        case 0x2B:
1434
            /* SQRTT */
1435
            if (!(ctx->amask & AMASK_FIX))
1436
                goto invalid_opc;
1437
            gen_fsqrtt(rb, rc);
1438
            break;
1439
        default:
1440
            goto invalid_opc;
1441
        }
1442
        break;
1443
    case 0x15:
1444
        /* VAX floating point */
1445
        /* XXX: rounding mode and trap are ignored (!) */
1446
        switch (fpfn) { /* fn11 & 0x3F */
1447
        case 0x00:
1448
            /* ADDF */
1449
            gen_faddf(ra, rb, rc);
1450
            break;
1451
        case 0x01:
1452
            /* SUBF */
1453
            gen_fsubf(ra, rb, rc);
1454
            break;
1455
        case 0x02:
1456
            /* MULF */
1457
            gen_fmulf(ra, rb, rc);
1458
            break;
1459
        case 0x03:
1460
            /* DIVF */
1461
            gen_fdivf(ra, rb, rc);
1462
            break;
1463
        case 0x1E:
1464
            /* CVTDG */
1465
#if 0 // TODO
1466
            gen_fcvtdg(rb, rc);
1467
#else
1468
            goto invalid_opc;
1469
#endif
1470
            break;
1471
        case 0x20:
1472
            /* ADDG */
1473
            gen_faddg(ra, rb, rc);
1474
            break;
1475
        case 0x21:
1476
            /* SUBG */
1477
            gen_fsubg(ra, rb, rc);
1478
            break;
1479
        case 0x22:
1480
            /* MULG */
1481
            gen_fmulg(ra, rb, rc);
1482
            break;
1483
        case 0x23:
1484
            /* DIVG */
1485
            gen_fdivg(ra, rb, rc);
1486
            break;
1487
        case 0x25:
1488
            /* CMPGEQ */
1489
            gen_fcmpgeq(ra, rb, rc);
1490
            break;
1491
        case 0x26:
1492
            /* CMPGLT */
1493
            gen_fcmpglt(ra, rb, rc);
1494
            break;
1495
        case 0x27:
1496
            /* CMPGLE */
1497
            gen_fcmpgle(ra, rb, rc);
1498
            break;
1499
        case 0x2C:
1500
            /* CVTGF */
1501
            gen_fcvtgf(rb, rc);
1502
            break;
1503
        case 0x2D:
1504
            /* CVTGD */
1505
#if 0 // TODO
1506
            gen_fcvtgd(rb, rc);
1507
#else
1508
            goto invalid_opc;
1509
#endif
1510
            break;
1511
        case 0x2F:
1512
            /* CVTGQ */
1513
            gen_fcvtgq(rb, rc);
1514
            break;
1515
        case 0x3C:
1516
            /* CVTQF */
1517
            gen_fcvtqf(rb, rc);
1518
            break;
1519
        case 0x3E:
1520
            /* CVTQG */
1521
            gen_fcvtqg(rb, rc);
1522
            break;
1523
        default:
1524
            goto invalid_opc;
1525
        }
1526
        break;
1527
    case 0x16:
1528
        /* IEEE floating-point */
1529
        /* XXX: rounding mode and traps are ignored (!) */
1530
        switch (fpfn) { /* fn11 & 0x3F */
1531
        case 0x00:
1532
            /* ADDS */
1533
            gen_fadds(ra, rb, rc);
1534
            break;
1535
        case 0x01:
1536
            /* SUBS */
1537
            gen_fsubs(ra, rb, rc);
1538
            break;
1539
        case 0x02:
1540
            /* MULS */
1541
            gen_fmuls(ra, rb, rc);
1542
            break;
1543
        case 0x03:
1544
            /* DIVS */
1545
            gen_fdivs(ra, rb, rc);
1546
            break;
1547
        case 0x20:
1548
            /* ADDT */
1549
            gen_faddt(ra, rb, rc);
1550
            break;
1551
        case 0x21:
1552
            /* SUBT */
1553
            gen_fsubt(ra, rb, rc);
1554
            break;
1555
        case 0x22:
1556
            /* MULT */
1557
            gen_fmult(ra, rb, rc);
1558
            break;
1559
        case 0x23:
1560
            /* DIVT */
1561
            gen_fdivt(ra, rb, rc);
1562
            break;
1563
        case 0x24:
1564
            /* CMPTUN */
1565
            gen_fcmptun(ra, rb, rc);
1566
            break;
1567
        case 0x25:
1568
            /* CMPTEQ */
1569
            gen_fcmpteq(ra, rb, rc);
1570
            break;
1571
        case 0x26:
1572
            /* CMPTLT */
1573
            gen_fcmptlt(ra, rb, rc);
1574
            break;
1575
        case 0x27:
1576
            /* CMPTLE */
1577
            gen_fcmptle(ra, rb, rc);
1578
            break;
1579
        case 0x2C:
1580
            /* XXX: incorrect */
1581
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
1582
                /* CVTST */
1583
                gen_fcvtst(rb, rc);
1584
            } else {
1585
                /* CVTTS */
1586
                gen_fcvtts(rb, rc);
1587
            }
1588
            break;
1589
        case 0x2F:
1590
            /* CVTTQ */
1591
            gen_fcvttq(rb, rc);
1592
            break;
1593
        case 0x3C:
1594
            /* CVTQS */
1595
            gen_fcvtqs(rb, rc);
1596
            break;
1597
        case 0x3E:
1598
            /* CVTQT */
1599
            gen_fcvtqt(rb, rc);
1600
            break;
1601
        default:
1602
            goto invalid_opc;
1603
        }
1604
        break;
1605
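    /* Opcode 0x17: FP copy-sign, FP conditional moves, FPCR access and
     * longword/quadword conversion instructions.  */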
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
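    /* Opcode 0x18: miscellaneous instructions.  Memory barriers and
     * prefetch hints are no-ops for TCG; TRAPB and EXCB simply end the
     * current translation block (ret = 2).  */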
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
#if 0
            ret = 2;
#else
            goto invalid_opc;
#endif
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
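    /* Opcode 0x1A: JMP, JSR, RET and JSR_COROUTINE.  The target address
     * (rb & ~3) is written to cpu_pc and the return address to ra before
     * the hint bits are examined; the hints do not change behaviour here.  */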
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
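            /* Bits <15:12> of the instruction select the HW_LD variant:
             * physical or virtual access, locked forms, write-check
             * forms and alternate access mode.  */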
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_ld_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_ld_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a)*/
                gen_helper_set_alt_mode();
                gen_helper_ld_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_ld_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
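    /* Opcode 0x1C: sign extension, bit-count and FP-to-integer move
     * instructions, each gated on the corresponding AMASK bit.  The MVI
     * multimedia forms are not implemented yet and fall through to
     * invalid_opc.  */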
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

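            /* The expression (insn << 51) >> 51 sign-extends the low 13
             * bits of the instruction, i.e. the displacement field.  */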
            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
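            /* As with HW_LD, bits <15:12> of the instruction select the
             * HW_ST variant; only the physical, locked and virtual forms
             * are defined, the rest are invalid.  */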
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
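    /* Memory-format instructions: opcodes 0x20-0x27 are the FP loads and
     * stores (F/G/S/T), 0x28-0x2F the integer loads and stores including
     * the load-locked/store-conditional pairs.  */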
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp16);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp16);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}

static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
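    /* Translate one target instruction per iteration.  The loop stops
     * when translate_one requests it, at a page boundary, when the op
     * buffer or the instruction budget is exhausted, or when single
     * stepping.  */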
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
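    /* ret == 1 means a branch already wrote cpu_pc and ret == 3 means an
     * exception was raised with the PC already updated; in the remaining
     * cases store the address of the next instruction before leaving.  */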
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

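/* Allocate and initialise a CPUAlphaState.  The cpu_model argument is
 * currently unused and the implementation version is hardcoded; most
 * IPRs are simply cleared and the HWPCB-based loads remain TODO.  */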
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}