/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

#undef ALPHA_DEBUG_DISAS
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

typedef struct DisasContext DisasContext;
struct DisasContext {
    struct TranslationBlock *tb;
    CPUAlphaState *env;
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;
};

/* Return values from translate_one, indicating the state of the TB.
   Note that zero indicates that we are not exiting the TB.  */

typedef enum {
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock_addr;
static TCGv cpu_lock_st_addr;
static TCGv cpu_lock_value;
#ifdef CONFIG_USER_ONLY
static TCGv cpu_uniq;
#endif

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
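/* The buffer is sized exactly for the names created in
   alpha_translate_init below: "ir0".."ir9" take 4 bytes each including
   the terminating NUL (10*4), "ir10".."ir30" take 5 (21*5), and likewise
   "fir0".."fir9" take 5 (10*5) and "fir10".."fir30" take 6 (21*6).  */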

#include "gen-icount.h"

static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock_addr = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, lock_addr),
                                           "lock_addr");
    cpu_lock_st_addr = tcg_global_mem_new_i64(TCG_AREG0,
                                              offsetof(CPUState, lock_st_addr),
                                              "lock_st_addr");
    cpu_lock_value = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, lock_value),
                                            "lock_value");

#ifdef CONFIG_USER_ONLY
    cpu_uniq = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, unique), "uniq");
#endif

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}

static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);

    return EXIT_NORETURN;
}
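/* Note that cpu_pc must be flushed before the helper call: gen_helper_excp
   raises the exception by longjmp-ing out of the generated code, so no
   code emitted after it runs -- hence the EXIT_NORETURN status.  */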

static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}

static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld32s(t0, t1, flags);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld64(t0, t1, flags);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
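/* The two load-locked helpers above record both the locked address and
   the value that was loaded; gen_store_conditional below compares against
   both before performing the store, so a store that changes the value
   between LDx_L and STx_C makes the conditional store fail.  */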

static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr, va;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear) {
            tcg_gen_andi_i64(addr, addr, ~0x7);
        }
    } else {
        if (clear) {
            disp16 &= ~0x7;
        }
        tcg_gen_movi_i64(addr, disp16);
    }

    va = (fp ? cpu_fir[ra] : cpu_ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(addr);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear)
{
    TCGv addr, va;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear) {
            tcg_gen_andi_i64(addr, addr, ~0x7);
        }
    } else {
        if (clear) {
            disp16 &= ~0x7;
        }
        tcg_gen_movi_i64(addr, disp16);
    }

    if (ra == 31) {
        va = tcg_const_i64(0);
    } else {
        va = (fp ? cpu_fir[ra] : cpu_ir[ra]);
    }
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(addr);
    if (ra == 31) {
        tcg_temp_free(va);
    }
}

static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int quad)
{
    TCGv addr;

    if (ra == 31) {
        /* ??? Don't bother storing anything.  The user can't tell
           the difference, since the zero register always reads zero.  */
        return NO_EXIT;
    }

#if defined(CONFIG_USER_ONLY)
    addr = cpu_lock_st_addr;
#else
    addr = tcg_temp_local_new();
#endif

    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
    } else {
        tcg_gen_movi_i64(addr, disp16);
    }

#if defined(CONFIG_USER_ONLY)
    /* ??? This is handled via a complicated version of compare-and-swap
       in the cpu_loop.  Hopefully one day we'll have a real CAS opcode
       in TCG so that this isn't necessary.  */
    return gen_excp(ctx, quad ? EXCP_STQ_C : EXCP_STL_C, ra);
#else
    /* ??? In system mode we are never multi-threaded, so CAS can be
       implemented via a non-atomic load-compare-store sequence.  */
    {
        int lab_fail, lab_done;
        TCGv val;

        lab_fail = gen_new_label();
        lab_done = gen_new_label();
        tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);

        val = tcg_temp_new();
        if (quad) {
            tcg_gen_qemu_ld64(val, addr, ctx->mem_idx);
        } else {
            tcg_gen_qemu_ld32s(val, addr, ctx->mem_idx);
        }
        tcg_gen_brcond_i64(TCG_COND_NE, val, cpu_lock_value, lab_fail);

        if (quad) {
            tcg_gen_qemu_st64(cpu_ir[ra], addr, ctx->mem_idx);
        } else {
            tcg_gen_qemu_st32(cpu_ir[ra], addr, ctx->mem_idx);
        }
        tcg_gen_movi_i64(cpu_ir[ra], 1);
        tcg_gen_br(lab_done);

        gen_set_label(lab_fail);
        tcg_gen_movi_i64(cpu_ir[ra], 0);

        gen_set_label(lab_done);
        tcg_gen_movi_i64(cpu_lock_addr, -1);

        tcg_temp_free(addr);
        return NO_EXIT;
    }
#endif
}
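/* On the system-mode path above, RA is overwritten with 1 on success and
   0 on failure, matching the architected STx_C result, and lock_addr is
   reset to -1 in either case so that a second conditional store without
   an intervening load-locked always fails.  */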

static int use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Check for the dest on the same page as the start of the TB.  We
       also want to suppress goto_tb in the case of single-stepping and IO.  */
    return (((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) == 0
            && !ctx->env->singlestep_enabled
            && !(ctx->tb->cflags & CF_LAST_IO));
}

static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);

    if (ra != 31) {
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return 0;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((tcg_target_long)ctx->tb);
        return EXIT_GOTO_TB;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return EXIT_PC_UPDATED;
    }
}
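/* Branch displacements are in units of instruction words, hence the
   disp << 2.  The main translation loop advances ctx->pc past the branch
   before translate_one runs, so RA receives the return address and dest
   is computed relative to the following instruction, as the architecture
   requires.  */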

static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                     TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);
    int lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
        tcg_gen_exit_tb((tcg_target_long)ctx->tb);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((tcg_target_long)ctx->tb + 1);

        return EXIT_GOTO_TB;
    } else {
        int lab_over = gen_new_label();

        /* ??? Consider using either
             movi pc, next
             addi tmp, pc, disp
             movcond pc, cond, 0, tmp, pc
           or
             setcond tmp, cond, 0
             movi pc, next
             neg tmp, tmp
             andi tmp, tmp, disp
             add pc, pc, tmp
           The current diamond subgraph surely isn't efficient.  */

        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
        tcg_gen_br(lab_over);
        gen_set_label(lab_true);
        tcg_gen_movi_i64(cpu_pc, dest);
        gen_set_label(lab_over);

        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                            int32_t disp, int mask)
{
    TCGv cmp_tmp;

    if (unlikely(ra == 31)) {
        cmp_tmp = tcg_const_i64(0);
    } else {
        cmp_tmp = tcg_temp_new();
        if (mask) {
            tcg_gen_andi_i64(cmp_tmp, cpu_ir[ra], 1);
        } else {
            tcg_gen_mov_i64(cmp_tmp, cpu_ir[ra]);
        }
    }

    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

/* Fold -0.0 for comparison with COND.  */

static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
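/* Worked example for the >= / < case above: if src is exactly -0.0
   (0x8000000000000000), the setcond yields 0, the negation leaves an
   all-zero mask, and the AND produces +0.0.  For any other src the
   setcond yields 1, the negation gives an all-ones mask, and src passes
   through unchanged.  */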

static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp)
{
    TCGv cmp_tmp;

    if (unlikely(ra == 31)) {
        /* Very uncommon case, but easier to optimize it to an integer
           comparison than continuing with the floating point comparison.  */
        return gen_bcond(ctx, cond, ra, disp, 0);
    }

    cmp_tmp = tcg_temp_new();
    gen_fold_mzero(cond, cmp_tmp, cpu_fir[ra]);
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

static void gen_cmov(TCGCond cond, int ra, int rb, int rc,
                     int islit, uint8_t lit, int mask)
{
    TCGCond inv_cond = tcg_invert_cond(cond);
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}

static void gen_fcmov(TCGCond cond, int ra, int rb, int rc)
{
    TCGv cmp_tmp;
    int l1;

    if (unlikely(rc == 31)) {
        return;
    }

    cmp_tmp = tcg_temp_new();
    if (unlikely(ra == 31)) {
        tcg_gen_movi_i64(cmp_tmp, 0);
    } else {
        gen_fold_mzero(cond, cmp_tmp, cpu_fir[ra]);
    }

    l1 = gen_new_label();
    tcg_gen_brcondi_i64(tcg_invert_cond(cond), cmp_tmp, 0, l1);
    tcg_temp_free(cmp_tmp);

    if (rb != 31)
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);
    else
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    gen_set_label(l1);
}

#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */

static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        tcg_gen_ld8u_i32(tmp, cpu_env, offsetof(CPUState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env, offsetof(CPUState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static TCGv gen_ieee_input(int reg, int fn11, int is_cmp)
{
    TCGv val = tcg_temp_new();
    if (reg == 31) {
        tcg_gen_movi_i64(val, 0);
    } else if (fn11 & QUAL_S) {
        gen_helper_ieee_input_s(val, cpu_fir[reg]);
    } else if (is_cmp) {
        gen_helper_ieee_input_cmp(val, cpu_fir[reg]);
    } else {
        gen_helper_ieee_input(val, cpu_fir[reg]);
    }
    return val;
}

static void gen_fp_exc_clear(void)
{
#if defined(CONFIG_SOFTFLOAT_INLINE)
    TCGv_i32 zero = tcg_const_i32(0);
    tcg_gen_st8_i32(zero, cpu_env,
                    offsetof(CPUState, fp_status.float_exception_flags));
    tcg_temp_free_i32(zero);
#else
    gen_helper_fp_exc_clear();
#endif
}

static void gen_fp_exc_raise_ignore(int rc, int fn11, int ignore)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 exc = tcg_temp_new_i32();
    TCGv_i32 reg;

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_ld8u_i32(exc, cpu_env,
                     offsetof(CPUState, fp_status.float_exception_flags));
#else
    gen_helper_fp_exc_get(exc);
#endif

    if (ignore) {
        tcg_gen_andi_i32(exc, exc, ~ignore);
    }

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);

    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(exc, reg);
    } else {
        gen_helper_fp_exc_raise(exc, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(exc);
}

static inline void gen_fp_exc_raise(int rc, int fn11)
{
    gen_fp_exc_raise_ignore(rc, fn11, fn11 & QUAL_I ? 0 : float_flag_inexact);
}

static void gen_fcvtlq(int rb, int rc)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(rb == 31)) {
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    } else {
        TCGv tmp = tcg_temp_new();

        /* The arithmetic right shift here, plus the sign-extended mask below
           yields a sign-extended result without an explicit ext32s_i64.  */
        tcg_gen_sari_i64(tmp, cpu_fir[rb], 32);
        tcg_gen_shri_i64(cpu_fir[rc], cpu_fir[rb], 29);
        tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
        tcg_gen_andi_i64(cpu_fir[rc], cpu_fir[rc], 0x3fffffff);
        tcg_gen_or_i64(cpu_fir[rc], cpu_fir[rc], tmp);

        tcg_temp_free(tmp);
    }
}
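/* In other words, CVTLQ gathers a longword from its in-register FP
   layout: bits <63:62> of the source supply bits <31:30> of the result,
   and bits <58:29> supply bits <29:0>, the inverse of the placement
   performed by gen_fcvtql below.  */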

static void gen_fcvtql(int rb, int rc)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(rb == 31)) {
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    } else {
        TCGv tmp = tcg_temp_new();

        tcg_gen_andi_i64(tmp, cpu_fir[rb], 0xC0000000);
        tcg_gen_andi_i64(cpu_fir[rc], cpu_fir[rb], 0x3FFFFFFF);
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_shli_i64(cpu_fir[rc], cpu_fir[rc], 29);
        tcg_gen_or_i64(cpu_fir[rc], cpu_fir[rc], tmp);

        tcg_temp_free(tmp);
    }
}

static void gen_fcvtql_v(DisasContext *ctx, int rb, int rc)
{
    if (rb != 31) {
        int lab = gen_new_label();
        TCGv tmp = tcg_temp_new();

        tcg_gen_ext32s_i64(tmp, cpu_fir[rb]);
        tcg_gen_brcond_i64(TCG_COND_EQ, tmp, cpu_fir[rb], lab);
        gen_excp(ctx, EXCP_ARITH, EXC_M_IOV);

        gen_set_label(lab);
    }
    gen_fcvtql(rb, rc);
}

#define FARITH2(name)                                   \
static inline void glue(gen_f, name)(int rb, int rc)    \
{                                                       \
    if (unlikely(rc == 31)) {                           \
        return;                                         \
    }                                                   \
    if (rb != 31) {                                     \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]); \
    } else {                                            \
        TCGv tmp = tcg_const_i64(0);                    \
        gen_helper_ ## name (cpu_fir[rc], tmp);         \
        tcg_temp_free(tmp);                             \
    }                                                   \
}

/* ??? VAX instruction qualifiers ignored.  */
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)

static void gen_ieee_arith2(DisasContext *ctx, void (*helper)(TCGv, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);
    gen_fp_exc_clear();

    vb = gen_ieee_input(rb, fn11, 0);
    helper(cpu_fir[rc], vb);
    tcg_temp_free(vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_f, name)(DisasContext *ctx,         \
                                     int rb, int rc, int fn11)  \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)

static void gen_fcvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb;
    int ignore = 0;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    /* No need to set flushzero, since we have an integer output.  */
    gen_fp_exc_clear();
    vb = gen_ieee_input(rb, fn11, 0);

    /* Almost all integer conversions use cropped rounding, and most
       also do not have integer overflow enabled.  Special case that.  */
    switch (fn11) {
    case QUAL_RM_C:
        gen_helper_cvttq_c(cpu_fir[rc], vb);
        break;
    case QUAL_V | QUAL_RM_C:
    case QUAL_S | QUAL_V | QUAL_RM_C:
        ignore = float_flag_inexact;
        /* FALLTHRU */
    case QUAL_S | QUAL_V | QUAL_I | QUAL_RM_C:
        gen_helper_cvttq_svic(cpu_fir[rc], vb);
        break;
    default:
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(cpu_fir[rc], vb);
        ignore |= (fn11 & QUAL_V ? 0 : float_flag_overflow);
        ignore |= (fn11 & QUAL_I ? 0 : float_flag_inexact);
        break;
    }
    tcg_temp_free(vb);

    gen_fp_exc_raise_ignore(rc, fn11, ignore);
}

static void gen_ieee_intcvt(DisasContext *ctx, void (*helper)(TCGv, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_qual_roundmode(ctx, fn11);

    if (rb == 31) {
        vb = tcg_const_i64(0);
    } else {
        vb = cpu_fir[rb];
    }

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    if (fn11 & QUAL_I) {
        gen_fp_exc_clear();
        helper(cpu_fir[rc], vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(cpu_fir[rc], vb);
    }

    if (rb == 31) {
        tcg_temp_free(vb);
    }
}

#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_f, name)(DisasContext *ctx,         \
                                     int rb, int rc, int fn11)  \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)

static void gen_cpys_internal(int ra, int rb, int rc, int inv_a, uint64_t mask)
{
    TCGv va, vb, vmask;
    int za = 0, zb = 0;

    if (unlikely(rc == 31)) {
        return;
    }

    vmask = tcg_const_i64(mask);

    TCGV_UNUSED_I64(va);
    if (ra == 31) {
        if (inv_a) {
            va = vmask;
        } else {
            za = 1;
        }
    } else {
        va = tcg_temp_new_i64();
        tcg_gen_mov_i64(va, cpu_fir[ra]);
        if (inv_a) {
            tcg_gen_andc_i64(va, vmask, va);
        } else {
            tcg_gen_and_i64(va, va, vmask);
        }
    }

    TCGV_UNUSED_I64(vb);
    if (rb == 31) {
        zb = 1;
    } else {
        vb = tcg_temp_new_i64();
        tcg_gen_andc_i64(vb, cpu_fir[rb], vmask);
    }

    switch (za << 1 | zb) {
    case 0 | 0:
        tcg_gen_or_i64(cpu_fir[rc], va, vb);
        break;
    case 0 | 1:
        tcg_gen_mov_i64(cpu_fir[rc], va);
        break;
    case 2 | 0:
        tcg_gen_mov_i64(cpu_fir[rc], vb);
        break;
    case 2 | 1:
        tcg_gen_movi_i64(cpu_fir[rc], 0);
        break;
    }

    tcg_temp_free(vmask);
    if (ra != 31) {
        tcg_temp_free(va);
    }
    if (rb != 31) {
        tcg_temp_free(vb);
    }
}

static inline void gen_fcpys(int ra, int rb, int rc)
{
    gen_cpys_internal(ra, rb, rc, 0, 0x8000000000000000ULL);
}

static inline void gen_fcpysn(int ra, int rb, int rc)
{
    gen_cpys_internal(ra, rb, rc, 1, 0x8000000000000000ULL);
}

static inline void gen_fcpyse(int ra, int rb, int rc)
{
    gen_cpys_internal(ra, rb, rc, 0, 0xFFF0000000000000ULL);
}

#define FARITH3(name)                                           \
static inline void glue(gen_f, name)(int ra, int rb, int rc)    \
{                                                               \
    TCGv va, vb;                                                \
                                                                \
    if (unlikely(rc == 31)) {                                   \
        return;                                                 \
    }                                                           \
    if (ra == 31) {                                             \
        va = tcg_const_i64(0);                                  \
    } else {                                                    \
        va = cpu_fir[ra];                                       \
    }                                                           \
    if (rb == 31) {                                             \
        vb = tcg_const_i64(0);                                  \
    } else {                                                    \
        vb = cpu_fir[rb];                                       \
    }                                                           \
                                                                \
    gen_helper_ ## name (cpu_fir[rc], va, vb);                  \
                                                                \
    if (ra == 31) {                                             \
        tcg_temp_free(va);                                      \
    }                                                           \
    if (rb == 31) {                                             \
        tcg_temp_free(vb);                                      \
    }                                                           \
}

/* ??? VAX instruction qualifiers ignored.  */
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)

static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);
    gen_fp_exc_clear();

    va = gen_ieee_input(ra, fn11, 0);
    vb = gen_ieee_input(rb, fn11, 0);
    helper(cpu_fir[rc], va, vb);
    tcg_temp_free(va);
    tcg_temp_free(vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_f, name)(DisasContext *ctx,                 \
                                     int ra, int rb, int rc, int fn11)  \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)

static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_fp_exc_clear();

    va = gen_ieee_input(ra, fn11, 1);
    vb = gen_ieee_input(rb, fn11, 1);
    helper(cpu_fir[rc], va, vb);
    tcg_temp_free(va);
    tcg_temp_free(vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_f, name)(DisasContext *ctx,                 \
                                     int ra, int rb, int rc, int fn11)  \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)

static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    for (i = 0; i < 8; ++i) {
        if ((lit >> i) & 1)
            mask |= 0xffull << (i * 8);
    }
    return mask;
}
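/* E.g. zapnot_mask(0x0f) is 0x00000000ffffffff (keep the low four bytes)
   and zapnot_mask(0x80) is 0xff00000000000000 (keep only the most
   significant byte): bit i of the literal selects byte i.  */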

/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64 (dest, src, zapnot_mask (lit));
        break;
    }
}

static inline void gen_zapnot(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit)
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], lit);
    else
        gen_helper_zapnot (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
}

static inline void gen_zap(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit)
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], ~lit);
    else
        gen_helper_zap (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
}

/* EXTWH, EXTLH, EXTQH */
static void gen_ext_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            lit = (64 - (lit & 7) * 8) & 0x3f;
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        } else {
            TCGv tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
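/* The extract-high shift above is 64 - 8*(rb & 7), masked to six bits so
   that rb & 7 == 0 degenerates to a shift of zero.  E.g. for EXTQH with
   (rb & 7) == 6, ra is shifted left by 16, moving bytes 0..5 into bytes
   2..7 before the byte mask is applied.  */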

/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}

/* INSWH, INSLH, INSQH */
static void gen_ins_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31) || (islit && (lit & 7) == 0))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            /* Note that we have handled the lit==0 case above.  */
            tcg_gen_shri_i64 (cpu_ir[rc], tmp, 64 - (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();

            /* If (B & 7) == 0, we need to shift by 64 and leave a zero.
               Do this portably by splitting the shift into two parts:
               shift_count-1 and 1.  Arrange for the -1 by using
               ones-complement instead of twos-complement in the negation:
               ~((B & 7) * 8) & 63.  */

            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_not_i64(shift, shift);
            tcg_gen_andi_i64(shift, shift, 0x3f);

            tcg_gen_shr_i64(cpu_ir[rc], tmp, shift);
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[rc], 1);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
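/* Worked example for the split shift above: ~((B & 7) * 8) & 63 equals
   63 - 8*(B & 7), so the variable shift plus the constant shift of 1
   total 64 - 8*(B & 7).  For (B & 7) == 0 that is 63 + 1 = 64, which
   safely produces zero, whereas a single 64-bit shift by 64 would not
   be portable.  */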

/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           the same number of byte slots as the data and apply the zap
           at the end.  This is equivalent to simply performing the zap
           first and shifting afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            tcg_gen_shli_i64(cpu_ir[rc], tmp, (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();
            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_shl_i64(cpu_ir[rc], tmp, shift);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}

/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~((B & 7) * 8) & 63.  */

        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
ARITH3(minub8)
ARITH3(minsb8)
ARITH3(minuw4)
ARITH3(minsw4)
ARITH3(maxub8)
ARITH3(maxsb8)
ARITH3(maxuw4)
ARITH3(maxsw4)
ARITH3(perr)

#define MVIOP2(name)                                    \
static inline void glue(gen_, name)(int rb, int rc)     \
{                                                       \
    if (unlikely(rc == 31))                             \
        return;                                         \
    if (unlikely(rb == 31))                             \
        tcg_gen_movi_i64(cpu_ir[rc], 0);                \
    else                                                \
        gen_helper_ ## name (cpu_ir[rc], cpu_ir[rb]);   \
}
MVIOP2(pklb)
MVIOP2(pkwb)
MVIOP2(unpkbl)
MVIOP2(unpkbw)

static void gen_cmp(TCGCond cond, int ra, int rb, int rc,
                    int islit, uint8_t lit)
{
    TCGv va, vb;

    if (unlikely(rc == 31)) {
        return;
    }

    if (ra == 31) {
        va = tcg_const_i64(0);
    } else {
        va = cpu_ir[ra];
    }
    if (islit) {
        vb = tcg_const_i64(lit);
    } else {
        vb = cpu_ir[rb];
    }

    tcg_gen_setcond_i64(cond, cpu_ir[rc], va, vb);

    if (ra == 31) {
        tcg_temp_free(va);
    }
    if (islit) {
        tcg_temp_free(vb);
    }
}

static void gen_rx(int ra, int set)
{
    TCGv_i32 tmp;

    if (ra != 31) {
        tcg_gen_ld8u_i64(cpu_ir[ra], cpu_env, offsetof(CPUState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUState, intr_flag));
    tcg_temp_free_i32(tmp);
}

static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, fn2, islit, real_islit;
    uint8_t lit;
    ExitStatus ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    real_islit = islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
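    /* The displacements are sign-extended by shifting the field up to
       bit 31 and arithmetic-shifting it back down: e.g. the 21-bit
       field 0x100000 becomes (0x100000 << 11) >> 11 = -1048576.  */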
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    LOG_DISAS("opc %02x ra %2d rb %2d rc %2d disp16 %6d\n",
              opc, ra, rb, rc, disp16);

    ret = NO_EXIT;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
#ifdef CONFIG_USER_ONLY
        if (palcode == 0x9E) {
            /* RDUNIQUE */
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_uniq);
            break;
        } else if (palcode == 0x9F) {
            /* WRUNIQUE */
            tcg_gen_mov_i64(cpu_uniq, cpu_ir[IR_A0]);
            break;
        }
#endif
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            ret = gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
            break;
        }
#ifndef CONFIG_USER_ONLY
        if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            ret = gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
        }
#endif
        /* Invalid PAL call */
        goto invalid_opc;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
1727
            /* ADDQ */
1728
            if (likely(rc != 31)) {
1729
                if (ra != 31) {
1730
                    if (islit)
1731
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1732
                    else
1733
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1734
                } else {
1735
                    if (islit)
1736
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1737
                    else
1738
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1739
                }
1740
            }
1741
            break;
1742
        case 0x22:
1743
            /* S4ADDQ */
1744
            if (likely(rc != 31)) {
1745
                if (ra != 31) {
1746
                    TCGv tmp = tcg_temp_new();
1747
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1748
                    if (islit)
1749
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1750
                    else
1751
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1752
                    tcg_temp_free(tmp);
1753
                } else {
1754
                    if (islit)
1755
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1756
                    else
1757
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1758
                }
1759
            }
1760
            break;
1761
        case 0x29:
1762
            /* SUBQ */
1763
            if (likely(rc != 31)) {
1764
                if (ra != 31) {
1765
                    if (islit)
1766
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1767
                    else
1768
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1769
                } else {
1770
                    if (islit)
1771
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1772
                    else
1773
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1774
                }
1775
            }
1776
            break;
1777
        case 0x2B:
1778
            /* S4SUBQ */
1779
            if (likely(rc != 31)) {
1780
                if (ra != 31) {
1781
                    TCGv tmp = tcg_temp_new();
1782
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1783
                    if (islit)
1784
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1785
                    else
1786
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1787
                    tcg_temp_free(tmp);
1788
                } else {
1789
                    if (islit)
1790
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1791
                    else
1792
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1793
                }
1794
            }
1795
            break;
1796
        case 0x2D:
1797
            /* CMPEQ */
1798
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
1799
            break;
1800
        case 0x32:
1801
            /* S8ADDQ */
1802
            if (likely(rc != 31)) {
1803
                if (ra != 31) {
1804
                    TCGv tmp = tcg_temp_new();
1805
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1806
                    if (islit)
1807
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1808
                    else
1809
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1810
                    tcg_temp_free(tmp);
1811
                } else {
1812
                    if (islit)
1813
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1814
                    else
1815
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1816
                }
1817
            }
1818
            break;
1819
        case 0x3B:
1820
            /* S8SUBQ */
1821
            if (likely(rc != 31)) {
1822
                if (ra != 31) {
1823
                    TCGv tmp = tcg_temp_new();
1824
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1825
                    if (islit)
1826
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1827
                    else
1828
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1829
                    tcg_temp_free(tmp);
1830
                } else {
1831
                    if (islit)
1832
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1833
                    else
1834
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1835
                }
1836
            }
1837
            break;
1838
        case 0x3D:
1839
            /* CMPULE */
1840
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1841
            break;
1842
        case 0x40:
1843
            /* ADDL/V */
1844
            gen_addlv(ra, rb, rc, islit, lit);
1845
            break;
1846
        case 0x49:
1847
            /* SUBL/V */
1848
            gen_sublv(ra, rb, rc, islit, lit);
1849
            break;
1850
        case 0x4D:
1851
            /* CMPLT */
1852
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1853
            break;
1854
        case 0x60:
1855
            /* ADDQ/V */
1856
            gen_addqv(ra, rb, rc, islit, lit);
1857
            break;
1858
        case 0x69:
1859
            /* SUBQ/V */
1860
            gen_subqv(ra, rb, rc, islit, lit);
1861
            break;
1862
        case 0x6D:
1863
            /* CMPLE */
1864
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1865
            break;
1866
        default:
1867
            goto invalid_opc;
1868
        }
1869
        break;
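    /* Opcode 0x11: logical operations and integer conditional moves.
       BIC/ORNOT/EQV with a literal operand are folded into
       AND/OR/XOR with the complemented literal at translate time.  */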
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                case IMPLVER_21164:
                case IMPLVER_21264:
                case IMPLVER_21364:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
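    /* Opcode 0x12: byte mask/extract/insert, ZAP/ZAPNOT and the
       shifts.  Per the architecture only the low six bits of Rb
       supply the shift count, hence the explicit "& 0x3f".  */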
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_msk_l(ra, rb, rc, islit, lit, 0x01);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ra, rb, rc, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_ins_l(ra, rb, rc, islit, lit, 0x01);
            break;
        case 0x12:
            /* MSKWL */
            gen_msk_l(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_ins_l(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x22:
            /* MSKLL */
            gen_msk_l(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_ins_l(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_msk_l(ra, rb, rc, islit, lit, 0xff);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ra, rb, rc, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_ins_l(ra, rb, rc, islit, lit, 0xff);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_msk_h(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x57:
            /* INSWH */
            gen_ins_h(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ra, rb, rc, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msk_h(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x67:
            /* INSLH */
            gen_ins_h(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ra, rb, rc, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_msk_h(ra, rb, rc, islit, lit, 0xff);
            break;
        case 0x77:
            /* INSQH */
            gen_ins_h(ra, rb, rc, islit, lit, 0xff);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ra, rb, rc, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;
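    /* Opcode 0x13: integer multiply.  MULL sign-extends the 32-bit
       product; UMULH yields the high 64 bits of the unsigned 128-bit
       product via a helper.  */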
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
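    /* Opcode 0x14: ITOF register moves and square roots.  All of
       these belong to the FIX extension, so they are gated on
       AMASK_FIX.  */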
    case 0x14:
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(ctx, rb, rc, fn11);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
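    /* Opcode 0x16: IEEE floating point.  The low six bits of fn11
       select the operation; the remaining bits carry the rounding
       and trapping qualifiers, which the gen_f* expanders decode.  */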
    case 0x16:
        /* IEEE floating-point */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ctx, ra, rb, rc, fn11);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ctx, ra, rb, rc, fn11);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ctx, ra, rb, rc, fn11);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ctx, ra, rb, rc, fn11);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ctx, ra, rb, rc, fn11);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ctx, ra, rb, rc, fn11);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ctx, ra, rb, rc, fn11);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ctx, ra, rb, rc, fn11);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ctx, ra, rb, rc, fn11);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ctx, ra, rb, rc, fn11);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ctx, ra, rb, rc, fn11);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ctx, ra, rb, rc, fn11);
            break;
        case 0x2C:
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(ctx, rb, rc, fn11);
            } else {
                /* CVTTS */
                gen_fcvtts(ctx, rb, rc, fn11);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(ctx, rb, rc, fn11);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(ctx, rb, rc, fn11);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb) {
                    /* FMOV */
                    if (ra == 31)
                        tcg_gen_movi_i64(cpu_fir[rc], 0);
                    else
                        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                } else {
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
                }
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmov(TCG_COND_EQ, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmov(TCG_COND_NE, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmov(TCG_COND_LT, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmov(TCG_COND_GE, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmov(TCG_COND_LE, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmov(TCG_COND_GT, ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
        case 0x530:
            /* CVTQL/SV */
            /* ??? I'm pretty sure there's nothing that /sv needs to do that
               /v doesn't do.  The only thing I can think is that /sv is a
               valid instruction merely for completeness in the ISA.  */
            gen_fcvtql_v(ctx, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
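    /* Opcode 0x18: miscellaneous instructions, selected by the
       disp16 field.  TRAPB/EXCB/MB/WMB need no code here since the
       emulator performs memory accesses in program order, and
       FETCH/ECB/WH64 are only performance hints.  */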
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op.  */
            break;
        case 0x0400:
            /* EXCB */
            /* No-op.  */
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            gen_rx(ra, 0);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            gen_rx(ra, 1);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
           prediction stack action, which of course we don't implement.  */
        if (rb != 31) {
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        } else {
            tcg_gen_movi_i64(cpu_pc, 0);
        }
        if (ra != 31) {
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        }
        ret = EXIT_PC_UPDATED;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
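    /* Opcode 0x1C: sign extension, counting and multimedia
       operations, each gated on the matching AMASK extension bit
       (BWX, CIX or MVI), plus the FTOI moves gated on FIX.  */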
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_perr(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_unpkbw (rb, rc);
            break;
        case 0x35:
            /* UNPKBL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_unpkbl (rb, rc);
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_pkwb (rb, rc);
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (real_islit || ra != 31)
                goto invalid_opc;
            gen_pklb (rb, rc);
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minsb8 (ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minsw4 (ra, rb, rc, islit, lit);
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minub8 (ra, rb, rc, islit, lit);
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_minuw4 (ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxub8 (ra, rb, rc, islit, lit);
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxuw4 (ra, rb, rc, islit, lit);
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxsb8 (ra, rb, rc, islit, lit);
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            gen_maxsw4 (ra, rb, rc, islit, lit);
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = EXIT_PC_STALE;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;
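
            /* The return target is Rb plus a signed displacement;
               the shift pair below sign-extends the low 13 bits of
               the instruction word.  */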
            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = EXIT_PC_UPDATED;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16, 0);
        break;
    case 0x2F:
        /* STQ_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16, 1);
        break;
    case 0x30:
        /* BR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x31: /* FBEQ */
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
        break;
    case 0x32: /* FBLT */
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
        break;
    case 0x33: /* FBLE */
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
        break;
    case 0x34:
        /* BSR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x35: /* FBNE */
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
        break;
    case 0x36: /* FBGE */
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
        break;
    case 0x37: /* FBGT */
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
        break;
    case 0x38:
        /* BLBC */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        break;
    case 0x39:
        /* BEQ */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        break;
    case 0x3A:
        /* BLT */
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        break;
    case 0x3B:
        /* BLE */
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        break;
    case 0x3C:
        /* BLBS */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        break;
    case 0x3D:
        /* BNE */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        break;
    case 0x3E:
        /* BGE */
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        break;
    case 0x3F:
        /* BGT */
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        break;
    invalid_opc:
        ret = gen_invalid(ctx);
        break;
    }

    return ret;
}
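
/* Translation driver.  Decodes and translates one instruction at a
   time until translate_one signals an exit, a page boundary is
   crossed, or the instruction budget is exhausted.  With search_pc
   set it also records which guest PC each generated op belongs to,
   so that a host PC can be mapped back to a guest instruction.  */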
3137

    
3138
static inline void gen_intermediate_code_internal(CPUState *env,
3139
                                                  TranslationBlock *tb,
3140
                                                  int search_pc)
3141
{
3142
    DisasContext ctx, *ctxp = &ctx;
3143
    target_ulong pc_start;
3144
    uint32_t insn;
3145
    uint16_t *gen_opc_end;
3146
    CPUBreakpoint *bp;
3147
    int j, lj = -1;
3148
    ExitStatus ret;
3149
    int num_insns;
3150
    int max_insns;
3151

    
3152
    pc_start = tb->pc;
3153
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3154

    
3155
    ctx.tb = tb;
3156
    ctx.env = env;
3157
    ctx.pc = pc_start;
3158
    ctx.amask = env->amask;
3159
#if defined (CONFIG_USER_ONLY)
3160
    ctx.mem_idx = 0;
3161
#else
3162
    ctx.mem_idx = ((env->ps >> 3) & 3);
3163
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
3164
#endif
3165

    
3166
    /* ??? Every TB begins with unset rounding mode, to be initialized on
3167
       the first fp insn of the TB.  Alternately we could define a proper
3168
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
3169
       to reset the FP_STATUS to that default at the end of any TB that
3170
       changes the default.  We could even (gasp) dynamiclly figure out
3171
       what default would be most efficient given the running program.  */
3172
    ctx.tb_rm = -1;
3173
    /* Similarly for flush-to-zero.  */
3174
    ctx.tb_ftz = -1;
3175

    
3176
    num_insns = 0;
3177
    max_insns = tb->cflags & CF_COUNT_MASK;
3178
    if (max_insns == 0)
3179
        max_insns = CF_COUNT_MASK;
3180

    
3181
    gen_icount_start();
3182
    do {
3183
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
3184
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
3185
                if (bp->pc == ctx.pc) {
3186
                    gen_excp(&ctx, EXCP_DEBUG, 0);
3187
                    break;
3188
                }
3189
            }
3190
        }
3191
        if (search_pc) {
3192
            j = gen_opc_ptr - gen_opc_buf;
3193
            if (lj < j) {
3194
                lj++;
3195
                while (lj < j)
3196
                    gen_opc_instr_start[lj++] = 0;
3197
            }
3198
            gen_opc_pc[lj] = ctx.pc;
3199
            gen_opc_instr_start[lj] = 1;
3200
            gen_opc_icount[lj] = num_insns;
3201
        }
3202
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        insn = ldl_code(ctx.pc);
        num_insns++;

        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
            tcg_gen_debug_insn_start(ctx.pc);
        }

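        /* Advance the PC before decoding: branch displacements are
           relative to the address of the following insn.  */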
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);

        if (ret == NO_EXIT) {
            /* If we reach a page boundary, are single stepping,
               or exhaust instruction count, stop generation.  */
            if (env->singlestep_enabled) {
                gen_excp(&ctx, EXCP_DEBUG, 0);
                ret = EXIT_PC_UPDATED;
            } else if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0
                       || gen_opc_ptr >= gen_opc_end
                       || num_insns >= max_insns
                       || singlestep) {
                ret = EXIT_PC_STALE;
            }
        }
    } while (ret == NO_EXIT);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

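    /* Emit the exit sequence that the final translation status calls
       for; only a stale PC still needs to be written back here.  */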
    switch (ret) {
    case EXIT_GOTO_TB:
    case EXIT_NORETURN:
        break;
    case EXIT_PC_STALE:
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
        /* FALLTHRU */
    case EXIT_PC_UPDATED:
        tcg_gen_exit_tb(0);
        break;
    default:
        abort();
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
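    /* On the PC-recovery pass, pad out the rest of the op-index table;
       a normal translation records the TB's size and icount instead.  */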
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

struct cpu_def_t {
    const char *name;
    int implver, amask;
};

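/* Known CPU models.  Unknown model names fall back to the ev67-like
   defaults chosen in cpu_alpha_init below.  */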
static const struct cpu_def_t cpu_defs[] = {
    { "ev4",   IMPLVER_2106x, 0 },
    { "ev5",   IMPLVER_21164, 0 },
    { "ev56",  IMPLVER_21164, AMASK_BWX },
    { "pca56", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "ev6",   IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "ev67",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "ev68",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "21064", IMPLVER_2106x, 0 },
    { "21164", IMPLVER_21164, 0 },
    { "21164a", IMPLVER_21164, AMASK_BWX },
    { "21164pc", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "21264", IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "21264a", IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                                | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), }
};

CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    int implver, amask, i, max;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);

    /* Default to ev67; no reason not to emulate insns by default.  */
    implver = IMPLVER_21264;
    amask = (AMASK_BWX | AMASK_FIX | AMASK_CIX | AMASK_MVI
             | AMASK_TRAP | AMASK_PREFETCH);

    max = ARRAY_SIZE(cpu_defs);
    for (i = 0; i < max; i++) {
        if (strcmp (cpu_model, cpu_defs[i].name) == 0) {
            implver = cpu_defs[i].implver;
            amask = cpu_defs[i].amask;
            break;
        }
    }
    env->implver = implver;
    env->amask = amask;

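    /* Initial processor status.  The 0x1F00 seed appears to encode the
       maximum interrupt priority level (compare IPR_IPL = 31 below);
       user emulation additionally selects user mode in PS<4:3> and
       disables all FP traps in the FPCR.  */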
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
    cpu_alpha_store_fpcr(env, (FPCR_INVD | FPCR_DZED | FPCR_OVFD
                               | FPCR_UNFD | FPCR_INED | FPCR_DNOD));
#else
    pal_init(env);
#endif
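    /* No load-locked address is outstanding at reset.  */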
    env->lock_addr = -1;

    /* Initialize IPR */
#if defined (CONFIG_USER_ONLY)
    env->ipr[IPR_EXC_ADDR] = 0;
    env->ipr[IPR_EXC_SUM] = 0;
    env->ipr[IPR_EXC_MASK] = 0;
#else
    {
        // uint64_t hwpcb;
        // hwpcb = env->ipr[IPR_PCBB];
        env->ipr[IPR_ASN] = 0;
        env->ipr[IPR_ASTEN] = 0;
        env->ipr[IPR_ASTSR] = 0;
        env->ipr[IPR_DATFX] = 0;
        /* XXX: fix this */
        //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
        //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
        //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
        //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
        env->ipr[IPR_FEN] = 0;
        env->ipr[IPR_IPL] = 31;
        env->ipr[IPR_MCES] = 0;
        env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
        //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
        env->ipr[IPR_SISR] = 0;
        env->ipr[IPR_VIRBND] = -1ULL;
    }
#endif

    qemu_init_vcpu(env);
    return env;
}

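/* Recover the guest PC after a retranslation pass; pc_pos indexes the
   table that gen_intermediate_code_pc filled in.  */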
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}