Statistics
| Branch: | Revision:

root / target-alpha / translate.c @ 034ebc27

History | View | Annotate | Download (103.4 kB)

1
/*
2
 *  Alpha emulation cpu translation for qemu.
3
 *
4
 *  Copyright (c) 2007 Jocelyn Mayer
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18
 */
19

    
20
#include <stdint.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23

    
24
#include "cpu.h"
25
#include "disas.h"
26
#include "host-utils.h"
27
#include "tcg-op.h"
28
#include "qemu-common.h"
29

    
30
#include "helper.h"
31
#define GEN_HELPER 1
32
#include "helper.h"
33

    
34
/* Define ALPHA_DEBUG_DISAS to enable per-insn disassembly logging.  */
#undef ALPHA_DEBUG_DISAS
/* When set, poke softfloat status fields directly with TCG stores instead
   of calling the out-of-line helpers.  */
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
42

    
43
typedef struct DisasContext DisasContext;
/* Per-translation-block translator state.  */
struct DisasContext {
    struct TranslationBlock *tb;
    CPUAlphaState *env;
    /* Guest PC of the insn being translated.  */
    uint64_t pc;
    /* MMU index used for memory accesses in this context.  */
    int mem_idx;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;
};
55

    
56
/* Return values from translate_one, indicating the state of the TB.
   Note that zero indicates that we are not exiting the TB.  */

typedef enum {
    /* NO_EXIT must be zero: callers test the return value for truth.  */
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;
78

    
79
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];        /* integer regs; $31 always reads as zero */
static TCGv cpu_fir[31];       /* fp regs; $f31 always reads as zero */
static TCGv cpu_pc;
static TCGv cpu_lock_addr;     /* address of last LDx_L; -1 when invalid */
static TCGv cpu_lock_st_addr;  /* user-only: address handed to STx_C emulation */
static TCGv cpu_lock_value;    /* value loaded by last LDx_L */
static TCGv cpu_unique;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_sysval;
static TCGv cpu_usp;
#endif

/* register names */
/* Sized exactly for "ir0".."ir30" (10*4 + 21*5 bytes incl. NULs) plus
   "fir0".."fir30" (10*5 + 21*6 bytes); filled in alpha_translate_init.  */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
95

    
96
#include "gen-icount.h"
97

    
98
/* Register the TCG globals backing the Alpha CPU state.  Idempotent:
   guarded so repeated calls after the first are no-ops.  */
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    /* Carve the register names out of cpu_reg_names: each name is written
       in place and p advanced past its NUL terminator, so the buffer ends
       up holding all 62 names back to back.  */
    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;  /* strlen("irN") + 1 */

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;  /* strlen("firN") + 1 */
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock_addr = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, lock_addr),
                                           "lock_addr");
    cpu_lock_st_addr = tcg_global_mem_new_i64(TCG_AREG0,
                                              offsetof(CPUState, lock_st_addr),
                                              "lock_st_addr");
    cpu_lock_value = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, lock_value),
                                            "lock_value");

    cpu_unique = tcg_global_mem_new_i64(TCG_AREG0,
                                        offsetof(CPUState, unique), "unique");
#ifndef CONFIG_USER_ONLY
    cpu_sysval = tcg_global_mem_new_i64(TCG_AREG0,
                                        offsetof(CPUState, sysval), "sysval");
    cpu_usp = tcg_global_mem_new_i64(TCG_AREG0,
                                     offsetof(CPUState, usp), "usp");
#endif

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
150

    
151
static void gen_excp_1(int exception, int error_code)
152
{
153
    TCGv_i32 tmp1, tmp2;
154

    
155
    tmp1 = tcg_const_i32(exception);
156
    tmp2 = tcg_const_i32(error_code);
157
    gen_helper_excp(tmp1, tmp2);
158
    tcg_temp_free_i32(tmp2);
159
    tcg_temp_free_i32(tmp1);
160
}
161

    
162
/* Raise EXCEPTION with ERROR_CODE at the current guest PC.  The helper
   does not return, so the TB ends here (EXIT_NORETURN).  */
static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_excp_1(exception, error_code);
    return EXIT_NORETURN;
}
168

    
169
/* Raise the illegal-opcode (OPCDEC) exception.  */
static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
173

    
174
/* LDF: load 32 bits and expand VAX F-float memory format into the
   64-bit register format via the memory_to_f helper.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv raw64 = tcg_temp_new();
    TCGv_i32 raw32 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32u(raw64, t1, flags);
    tcg_gen_trunc_i64_i32(raw32, raw64);
    gen_helper_memory_to_f(t0, raw32);

    tcg_temp_free_i32(raw32);
    tcg_temp_free(raw64);
}
184

    
185
/* LDG: load 64 bits and convert VAX G-float memory format into the
   register format via the memory_to_g helper.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv raw = tcg_temp_new();

    tcg_gen_qemu_ld64(raw, t1, flags);
    gen_helper_memory_to_g(t0, raw);

    tcg_temp_free(raw);
}
192

    
193
/* LDS: load 32 bits and expand IEEE single memory format into the
   64-bit register format via the memory_to_s helper.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv raw64 = tcg_temp_new();
    TCGv_i32 raw32 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32u(raw64, t1, flags);
    tcg_gen_trunc_i64_i32(raw32, raw64);
    gen_helper_memory_to_s(t0, raw32);

    tcg_temp_free_i32(raw32);
    tcg_temp_free(raw64);
}
203

    
204
/* LDL_L: load-locked longword.  Records the locked address and the
   loaded value so a later STx_C can verify them.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld32s(t0, t1, flags);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
210

    
211
/* LDQ_L: load-locked quadword.  Records the locked address and the
   loaded value so a later STx_C can verify them.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld64(t0, t1, flags);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
217

    
218
static inline void gen_load_mem(DisasContext *ctx,
219
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
220
                                                          int flags),
221
                                int ra, int rb, int32_t disp16, int fp,
222
                                int clear)
223
{
224
    TCGv addr, va;
225

    
226
    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
227
       prefetches, which we can treat as nops.  No worries about
228
       missed exceptions here.  */
229
    if (unlikely(ra == 31)) {
230
        return;
231
    }
232

    
233
    addr = tcg_temp_new();
234
    if (rb != 31) {
235
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
236
        if (clear) {
237
            tcg_gen_andi_i64(addr, addr, ~0x7);
238
        }
239
    } else {
240
        if (clear) {
241
            disp16 &= ~0x7;
242
        }
243
        tcg_gen_movi_i64(addr, disp16);
244
    }
245

    
246
    va = (fp ? cpu_fir[ra] : cpu_ir[ra]);
247
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);
248

    
249
    tcg_temp_free(addr);
250
}
251

    
252
/* STF: convert the register F-float format back to the 32-bit memory
   format and store it.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 mem32 = tcg_temp_new_i32();
    TCGv mem64 = tcg_temp_new();

    gen_helper_f_to_memory(mem32, t0);
    tcg_gen_extu_i32_i64(mem64, mem32);
    tcg_gen_qemu_st32(mem64, t1, flags);

    tcg_temp_free(mem64);
    tcg_temp_free_i32(mem32);
}
262

    
263
/* STG: convert the register G-float format back to the 64-bit memory
   format and store it.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv mem = tcg_temp_new();

    gen_helper_g_to_memory(mem, t0);
    tcg_gen_qemu_st64(mem, t1, flags);

    tcg_temp_free(mem);
}
270

    
271
/* STS: convert the register single-precision format back to the 32-bit
   memory format and store it.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 mem32 = tcg_temp_new_i32();
    TCGv mem64 = tcg_temp_new();

    gen_helper_s_to_memory(mem32, t0);
    tcg_gen_extu_i32_i64(mem64, mem32);
    tcg_gen_qemu_st32(mem64, t1, flags);

    tcg_temp_free(mem64);
    tcg_temp_free_i32(mem32);
}
281

    
282
static inline void gen_store_mem(DisasContext *ctx,
283
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
284
                                                            int flags),
285
                                 int ra, int rb, int32_t disp16, int fp,
286
                                 int clear)
287
{
288
    TCGv addr, va;
289

    
290
    addr = tcg_temp_new();
291
    if (rb != 31) {
292
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
293
        if (clear) {
294
            tcg_gen_andi_i64(addr, addr, ~0x7);
295
        }
296
    } else {
297
        if (clear) {
298
            disp16 &= ~0x7;
299
        }
300
        tcg_gen_movi_i64(addr, disp16);
301
    }
302

    
303
    if (ra == 31) {
304
        va = tcg_const_i64(0);
305
    } else {
306
        va = (fp ? cpu_fir[ra] : cpu_ir[ra]);
307
    }
308
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);
309

    
310
    tcg_temp_free(addr);
311
    if (ra == 31) {
312
        tcg_temp_free(va);
313
    }
314
}
315

    
316
/* STL_C/STQ_C: store-conditional.  On completion RA holds 1 on success
   and 0 on failure.  In user mode the whole operation is punted to the
   cpu_loop via a special exception; in system mode it is emulated with
   a non-atomic load/compare/store sequence against the lock state
   recorded by the last LDx_L.  */
static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int quad)
{
    TCGv addr;

    if (ra == 31) {
        /* ??? Don't bother storing anything.  The user can't tell
           the difference, since the zero register always reads zero.  */
        return NO_EXIT;
    }

#if defined(CONFIG_USER_ONLY)
    addr = cpu_lock_st_addr;
#else
    addr = tcg_temp_local_new();
#endif

    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
    } else {
        tcg_gen_movi_i64(addr, disp16);
    }

#if defined(CONFIG_USER_ONLY)
    /* ??? This is handled via a complicated version of compare-and-swap
       in the cpu_loop.  Hopefully one day we'll have a real CAS opcode
       in TCG so that this isn't necessary.  */
    return gen_excp(ctx, quad ? EXCP_STQ_C : EXCP_STL_C, ra);
#else
    /* ??? In system mode we are never multi-threaded, so CAS can be
       implemented via a non-atomic load-compare-store sequence.  */
    {
        int lab_fail, lab_done;
        TCGv val;

        lab_fail = gen_new_label();
        lab_done = gen_new_label();
        /* Fail if the address doesn't match the one locked by LDx_L.  */
        tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);

        /* Re-load and fail if the memory value changed meanwhile.  */
        val = tcg_temp_new();
        if (quad) {
            tcg_gen_qemu_ld64(val, addr, ctx->mem_idx);
        } else {
            tcg_gen_qemu_ld32s(val, addr, ctx->mem_idx);
        }
        tcg_gen_brcond_i64(TCG_COND_NE, val, cpu_lock_value, lab_fail);

        /* Success: perform the store and set RA to 1.  */
        if (quad) {
            tcg_gen_qemu_st64(cpu_ir[ra], addr, ctx->mem_idx);
        } else {
            tcg_gen_qemu_st32(cpu_ir[ra], addr, ctx->mem_idx);
        }
        tcg_gen_movi_i64(cpu_ir[ra], 1);
        tcg_gen_br(lab_done);

        gen_set_label(lab_fail);
        tcg_gen_movi_i64(cpu_ir[ra], 0);

        gen_set_label(lab_done);
        /* Either way, the lock is consumed: -1 never matches an address.  */
        tcg_gen_movi_i64(cpu_lock_addr, -1);

        tcg_temp_free(addr);
        return NO_EXIT;
    }
#endif
}
382

    
383
/* Decide whether a direct goto_tb chain to DEST is permissible.  */
static int use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* goto_tb is only valid when the destination is on the same page as
       the start of this TB, and must be suppressed under single-stepping
       and when the last insn may touch I/O.  */
    if ((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) {
        return 0;
    }
    if (ctx->env->singlestep_enabled) {
        return 0;
    }
    if (ctx->tb->cflags & CF_LAST_IO) {
        return 0;
    }
    return 1;
}
391

    
392
/* BR/BSR: unconditional direct branch.  DISP is the signed longword
   displacement from the next insn; RA (unless $31) receives the return
   address.  Note the displacement is scaled with a multiply rather than
   `disp << 2`: left-shifting a negative signed value is undefined
   behavior in C.  */
static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->pc + (int64_t)disp * 4;

    if (ra != 31) {
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return NO_EXIT;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((tcg_target_long)ctx->tb);
        return EXIT_GOTO_TB;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return EXIT_PC_UPDATED;
    }
}
413

    
414
static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
415
                                     TCGv cmp, int32_t disp)
416
{
417
    uint64_t dest = ctx->pc + (disp << 2);
418
    int lab_true = gen_new_label();
419

    
420
    if (use_goto_tb(ctx, dest)) {
421
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);
422

    
423
        tcg_gen_goto_tb(0);
424
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
425
        tcg_gen_exit_tb((tcg_target_long)ctx->tb);
426

    
427
        gen_set_label(lab_true);
428
        tcg_gen_goto_tb(1);
429
        tcg_gen_movi_i64(cpu_pc, dest);
430
        tcg_gen_exit_tb((tcg_target_long)ctx->tb + 1);
431

    
432
        return EXIT_GOTO_TB;
433
    } else {
434
        int lab_over = gen_new_label();
435

    
436
        /* ??? Consider using either
437
             movi pc, next
438
             addi tmp, pc, disp
439
             movcond pc, cond, 0, tmp, pc
440
           or
441
             setcond tmp, cond, 0
442
             movi pc, next
443
             neg tmp, tmp
444
             andi tmp, tmp, disp
445
             add pc, pc, tmp
446
           The current diamond subgraph surely isn't efficient.  */
447

    
448
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);
449
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
450
        tcg_gen_br(lab_over);
451
        gen_set_label(lab_true);
452
        tcg_gen_movi_i64(cpu_pc, dest);
453
        gen_set_label(lab_over);
454

    
455
        return EXIT_PC_UPDATED;
456
    }
457
}
458

    
459
static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
460
                            int32_t disp, int mask)
461
{
462
    TCGv cmp_tmp;
463

    
464
    if (unlikely(ra == 31)) {
465
        cmp_tmp = tcg_const_i64(0);
466
    } else {
467
        cmp_tmp = tcg_temp_new();
468
        if (mask) {
469
            tcg_gen_andi_i64(cmp_tmp, cpu_ir[ra], 1);
470
        } else {
471
            tcg_gen_mov_i64(cmp_tmp, cpu_ir[ra]);
472
        }
473
    }
474

    
475
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
476
}
477

    
478
/* Fold -0.0 for comparison with COND.  */
479

    
480
/* Canonicalize SRC into DEST so that a plain integer comparison of DEST
   against zero under COND gives the IEEE answer with respect to -0.0.
   DEST is only valid for that comparison, not as a general value.  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;  /* bit pattern of -0.0 */

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
509

    
510
static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
511
                             int32_t disp)
512
{
513
    TCGv cmp_tmp;
514

    
515
    if (unlikely(ra == 31)) {
516
        /* Very uncommon case, but easier to optimize it to an integer
517
           comparison than continuing with the floating point comparison.  */
518
        return gen_bcond(ctx, cond, ra, disp, 0);
519
    }
520

    
521
    cmp_tmp = tcg_temp_new();
522
    gen_fold_mzero(cond, cmp_tmp, cpu_fir[ra]);
523
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
524
}
525

    
526
/* CMOVxx: conditionally move RB (or the literal LIT) into RC when RA
   compared against zero satisfies COND.  MASK selects the low-bit-test
   forms (CMOVLBC/CMOVLBS).  */
static void gen_cmov(TCGCond cond, int ra, int rb, int rc,
                     int islit, uint8_t lit, int mask)
{
    TCGCond inv_cond = tcg_invert_cond(cond);
    int lab_skip;

    /* Writes to $31 are discarded.  */
    if (unlikely(rc == 31)) {
        return;
    }

    lab_skip = gen_new_label();

    /* Branch around the move when the inverted condition holds.  */
    if (ra == 31) {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv zero = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, zero, 0, lab_skip);
        tcg_temp_free(zero);
    } else if (mask) {
        TCGv lsb = tcg_temp_new();
        tcg_gen_andi_i64(lsb, cpu_ir[ra], 1);
        tcg_gen_brcondi_i64(inv_cond, lsb, 0, lab_skip);
        tcg_temp_free(lsb);
    } else {
        tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, lab_skip);
    }

    if (islit) {
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    } else {
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    }
    gen_set_label(lab_skip);
}
558

    
559
/* FCMOVxx: conditionally move fp register RB into RC when RA compared
   against zero satisfies COND (-0.0 treated as zero).  */
static void gen_fcmov(TCGCond cond, int ra, int rb, int rc)
{
    TCGv folded;
    int lab_skip;

    /* Writes to $f31 are discarded.  */
    if (unlikely(rc == 31)) {
        return;
    }

    /* Canonicalize the comparison source; $f31 reads as zero.  */
    folded = tcg_temp_new();
    if (unlikely(ra == 31)) {
        tcg_gen_movi_i64(folded, 0);
    } else {
        gen_fold_mzero(cond, folded, cpu_fir[ra]);
    }

    lab_skip = gen_new_label();
    tcg_gen_brcondi_i64(tcg_invert_cond(cond), folded, 0, lab_skip);
    tcg_temp_free(folded);

    if (rb == 31) {
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    } else {
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);
    }
    gen_set_label(lab_skip);
}
585

    
586
/* Floating-point instruction qualifier bits, as encoded in fn11.  */
#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */
596

    
597
/* Emit code to install the rounding mode selected by the FN11 qualifier
   into fp_status, skipping the emission when the mode is unchanged since
   the last fp insn in this TB.  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    /* The masked value can only be one of the four cases below, so no
       default is needed.  */
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic mode: read the current FPCR setting at runtime.  */
        tcg_gen_ld8u_i32(tmp, cpu_env, offsetof(CPUState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
635

    
636
/* Emit code to install the flush-to-zero setting implied by the FN11
   qualifier, skipping the emission when unchanged within this TB.  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 ftz;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    ftz = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(ftz, cpu_env,
                         offsetof(CPUState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(ftz, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(ftz, cpu_env,
                    offsetof(CPUState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(ftz);
#endif

    tcg_temp_free_i32(ftz);
}
664

    
665
/* Fetch fp register REG as an input operand, routed through one of the
   ieee_input helpers ($f31 reads as zero).  QUAL_S selects the software
   completion variant; IS_CMP selects the comparison variant.  The caller
   owns (and must free) the returned temporary.  */
static TCGv gen_ieee_input(int reg, int fn11, int is_cmp)
{
    TCGv val = tcg_temp_new();
    if (reg == 31) {
        tcg_gen_movi_i64(val, 0);
    } else if (fn11 & QUAL_S) {
        gen_helper_ieee_input_s(val, cpu_fir[reg]);
    } else if (is_cmp) {
        gen_helper_ieee_input_cmp(val, cpu_fir[reg]);
    } else {
        gen_helper_ieee_input(val, cpu_fir[reg]);
    }
    return val;
}
679

    
680
/* Clear the accumulated softfloat exception flags before an fp op, so
   that gen_fp_exc_raise sees only this insn's exceptions.  */
static void gen_fp_exc_clear(void)
{
#if defined(CONFIG_SOFTFLOAT_INLINE)
    TCGv_i32 zero = tcg_const_i32(0);
    tcg_gen_st8_i32(zero, cpu_env,
                    offsetof(CPUState, fp_status.float_exception_flags));
    tcg_temp_free_i32(zero);
#else
    gen_helper_fp_exc_clear();
#endif
}
691

    
692
/* Raise the fp exceptions accumulated in fp_status for the op that wrote
   register RC, masking out the flags in IGNORE.  FN11's QUAL_S selects
   the software-completion raise helper.  */
static void gen_fp_exc_raise_ignore(int rc, int fn11, int ignore)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 exc = tcg_temp_new_i32();
    TCGv_i32 reg;

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_ld8u_i32(exc, cpu_env,
                     offsetof(CPUState, fp_status.float_exception_flags));
#else
    gen_helper_fp_exc_get(exc);
#endif

    if (ignore) {
        tcg_gen_andi_i32(exc, exc, ~ignore);
    }

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);

    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(exc, reg);
    } else {
        gen_helper_fp_exc_raise(exc, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(exc);
}
729

    
730
/* Raise fp exceptions for RC, suppressing inexact unless the /I
   qualifier requested inexact detection.  */
static inline void gen_fp_exc_raise(int rc, int fn11)
{
    gen_fp_exc_raise_ignore(rc, fn11, fn11 & QUAL_I ? 0 : float_flag_inexact);
}
734

    
735
/* CVTLQ: extract a longword from the fp-register layout of RB into a
   sign-extended quadword in RC, done entirely with shifts and masks.  */
static void gen_fcvtlq(int rb, int rc)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(rb == 31)) {
        /* $f31 reads as zero.  */
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    } else {
        TCGv tmp = tcg_temp_new();

        /* The arithmetic right shift here, plus the sign-extended mask below
           yields a sign-extended result without an explicit ext32s_i64.  */
        tcg_gen_sari_i64(tmp, cpu_fir[rb], 32);
        tcg_gen_shri_i64(cpu_fir[rc], cpu_fir[rb], 29);
        tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
        tcg_gen_andi_i64(cpu_fir[rc], cpu_fir[rc], 0x3fffffff);
        tcg_gen_or_i64(cpu_fir[rc], cpu_fir[rc], tmp);

        tcg_temp_free(tmp);
    }
}
756

    
757
/* CVTQL: scatter the low 32 bits of RB into the fp-register longword
   layout in RC.  NOTE: tmp must capture RB's bits before cpu_fir[rc] is
   written, since RB and RC may be the same register.  */
static void gen_fcvtql(int rb, int rc)
{
    if (unlikely(rc == 31)) {
        return;
    }
    if (unlikely(rb == 31)) {
        /* $f31 reads as zero.  */
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    } else {
        TCGv tmp = tcg_temp_new();

        tcg_gen_andi_i64(tmp, cpu_fir[rb], 0xC0000000);
        tcg_gen_andi_i64(cpu_fir[rc], cpu_fir[rb], 0x3FFFFFFF);
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_shli_i64(cpu_fir[rc], cpu_fir[rc], 29);
        tcg_gen_or_i64(cpu_fir[rc], cpu_fir[rc], tmp);

        tcg_temp_free(tmp);
    }
}
776

    
777
/* CVTQL/V: as CVTQL, but first raise an integer-overflow arithmetic trap
   when RB does not fit in a signed 32-bit value.  */
static void gen_fcvtql_v(DisasContext *ctx, int rb, int rc)
{
    if (rb != 31) {
        int lab = gen_new_label();
        TCGv tmp = tcg_temp_new();

        /* RB fits in 32 bits iff sign-extending its low half is a no-op.  */
        tcg_gen_ext32s_i64(tmp, cpu_fir[rb]);
        tcg_gen_brcond_i64(TCG_COND_EQ, tmp, cpu_fir[rb], lab);
        gen_excp(ctx, EXCP_ARITH, EXC_M_IOV);

        gen_set_label(lab);
    }
    gen_fcvtql(rb, rc);
}
791

    
792
/* Emit a VAX-format fp unary operation via its helper.  A $f31 source
   reads as zero; a $f31 destination discards the write entirely.  */
#define FARITH2(name)                                   \
static inline void glue(gen_f, name)(int rb, int rc)    \
{                                                       \
    if (unlikely(rc == 31)) {                           \
        return;                                         \
    }                                                   \
    if (rb != 31) {                                     \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]); \
    } else {                                                \
        TCGv tmp = tcg_const_i64(0);                    \
        gen_helper_ ## name (cpu_fir[rc], tmp);         \
        tcg_temp_free(tmp);                             \
    }                                                   \
}

/* ??? VAX instruction qualifiers ignored.  */
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
814

    
815
/* Emit an IEEE fp unary operation: apply the fn11 qualifiers, run the
   helper on a clean exception byte, then raise what it produced.  */
static void gen_ieee_arith2(DisasContext *ctx, void (*helper)(TCGv, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv src;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);
    gen_fp_exc_clear();

    src = gen_ieee_input(rb, fn11, 0);
    helper(cpu_fir[rc], src);
    tcg_temp_free(src);

    gen_fp_exc_raise(rc, fn11);
}
836

    
837
/* Thin qualifier-aware wrappers around gen_ieee_arith2 for each IEEE
   unary operation helper.  */
#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_f, name)(DisasContext *ctx,         \
                                     int rb, int rc, int fn11)  \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)
847

    
848
/* CVTTQ: convert T-float to a quadword integer.  The helper is chosen
   from the exact fn11 qualifier combination, special-casing cropped
   rounding with traps mostly disabled.  */
static void gen_fcvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb;
    int ignore = 0;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    /* No need to set flushzero, since we have an integer output.  */
    gen_fp_exc_clear();
    vb = gen_ieee_input(rb, fn11, 0);

    /* Almost all integer conversions use cropped rounding, and most
       also do not have integer overflow enabled.  Special case that.  */
    switch (fn11) {
    case QUAL_RM_C:
        gen_helper_cvttq_c(cpu_fir[rc], vb);
        break;
    case QUAL_V | QUAL_RM_C:
    case QUAL_S | QUAL_V | QUAL_RM_C:
        ignore = float_flag_inexact;
        /* FALLTHRU */
    case QUAL_S | QUAL_V | QUAL_I | QUAL_RM_C:
        gen_helper_cvttq_svic(cpu_fir[rc], vb);
        break;
    default:
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(cpu_fir[rc], vb);
        /* Suppress overflow/inexact unless /V resp. /I was given.  */
        ignore |= (fn11 & QUAL_V ? 0 : float_flag_overflow);
        ignore |= (fn11 & QUAL_I ? 0 : float_flag_inexact);
        break;
    }
    tcg_temp_free(vb);

    gen_fp_exc_raise_ignore(rc, fn11, ignore);
}
887

    
888
/* Emit an integer-to-float conversion (CVTQS/CVTQT) via HELPER, honoring
   the rounding qualifier.  Exception bookkeeping is only emitted when /I
   is requested, since inexact is the only exception these can raise.  */
static void gen_ieee_intcvt(DisasContext *ctx, void (*helper)(TCGv, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_qual_roundmode(ctx, fn11);

    /* $f31 reads as zero; use a throw-away constant in that case.  */
    if (rb == 31) {
        vb = tcg_const_i64(0);
    } else {
        vb = cpu_fir[rb];
    }

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    if (fn11 & QUAL_I) {
        gen_fp_exc_clear();
        helper(cpu_fir[rc], vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(cpu_fir[rc], vb);
    }

    if (rb == 31) {
        tcg_temp_free(vb);
    }
}
922

    
923
/* Thin qualifier-aware wrappers around gen_ieee_intcvt for the
   quadword-to-float conversions.  */
#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_f, name)(DisasContext *ctx,         \
                                     int rb, int rc, int fn11)  \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)
931

    
932
/* Core of the CPYS family: RC = (RA & MASK, optionally complemented)
   OR (RB & ~MASK).  ZA/ZB record operands statically known to be zero
   ($31), so those terms can be dropped at translation time.  */
static void gen_cpys_internal(int ra, int rb, int rc, int inv_a, uint64_t mask)
{
    TCGv va, vb, vmask;
    int za = 0, zb = 0;

    if (unlikely(rc == 31)) {
        return;
    }

    vmask = tcg_const_i64(mask);

    TCGV_UNUSED_I64(va);
    if (ra == 31) {
        if (inv_a) {
            /* ~0 & mask == mask; reuse the constant (not freed as va).  */
            va = vmask;
        } else {
            za = 1;
        }
    } else {
        va = tcg_temp_new_i64();
        tcg_gen_mov_i64(va, cpu_fir[ra]);
        if (inv_a) {
            tcg_gen_andc_i64(va, vmask, va);
        } else {
            tcg_gen_and_i64(va, va, vmask);
        }
    }

    TCGV_UNUSED_I64(vb);
    if (rb == 31) {
        zb = 1;
    } else {
        vb = tcg_temp_new_i64();
        tcg_gen_andc_i64(vb, cpu_fir[rb], vmask);
    }

    /* Combine the two (possibly zero) halves.  */
    switch (za << 1 | zb) {
    case 0 | 0:
        /* Neither operand is zero.  */
        tcg_gen_or_i64(cpu_fir[rc], va, vb);
        break;
    case 0 | 1:
        /* Only vb is zero.  */
        tcg_gen_mov_i64(cpu_fir[rc], va);
        break;
    case 2 | 0:
        /* Only va is zero.  */
        tcg_gen_mov_i64(cpu_fir[rc], vb);
        break;
    case 2 | 1:
        /* Both are zero.  */
        tcg_gen_movi_i64(cpu_fir[rc], 0);
        break;
    }

    tcg_temp_free(vmask);
    if (ra != 31) {
        tcg_temp_free(va);
    }
    if (rb != 31) {
        tcg_temp_free(vb);
    }
}
991

    
992
/* CPYS: copy the sign bit (bit 63) of FRa onto the rest of FRb.  */
static inline void gen_fcpys(int ra, int rb, int rc)
{
    gen_cpys_internal(ra, rb, rc, 0, 0x8000000000000000ULL);
}
996

    
997
/* CPYSN: like CPYS but with the sign bit of FRa complemented.  */
static inline void gen_fcpysn(int ra, int rb, int rc)
{
    gen_cpys_internal(ra, rb, rc, 1, 0x8000000000000000ULL);
}
1001

    
1002
/* CPYSE: copy FRa's sign and exponent field (bits <63:52>) onto FRb's
   fraction bits.  */
static inline void gen_fcpyse(int ra, int rb, int rc)
{
    gen_cpys_internal(ra, rb, rc, 0, 0xFFF0000000000000ULL);
}
1006

    
1007
/* Three-operand FP helper dispatch for the VAX-format operations:
   $f31 as a source reads as zero (a freshly allocated constant); a
   write to $f31 is discarded and the whole operation skipped.  */
#define FARITH3(name)                                           \
static inline void glue(gen_f, name)(int ra, int rb, int rc)    \
{                                                               \
    TCGv va, vb;                                                \
                                                                \
    if (unlikely(rc == 31)) {                                   \
        return;                                                 \
    }                                                           \
    if (ra == 31) {                                             \
        va = tcg_const_i64(0);                                  \
    } else {                                                    \
        va = cpu_fir[ra];                                       \
    }                                                           \
    if (rb == 31) {                                             \
        vb = tcg_const_i64(0);                                  \
    } else {                                                    \
        vb = cpu_fir[rb];                                       \
    }                                                           \
                                                                \
    gen_helper_ ## name (cpu_fir[rc], va, vb);                  \
                                                                \
    if (ra == 31) {                                             \
        tcg_temp_free(va);                                      \
    }                                                           \
    if (rb == 31) {                                             \
        tcg_temp_free(vb);                                      \
    }                                                           \
}

/* ??? VAX instruction qualifiers ignored.  */
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
1048

    
1049
/* Three-operand IEEE arithmetic: apply the rounding-mode and
   flush-to-zero qualifiers encoded in FN11, clear the accumulated FP
   status, run the helper on the (possibly NaN-squashed) inputs, then
   raise whatever exceptions the qualifiers enable.  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);
    gen_fp_exc_clear();

    /* Third argument 0: operands are arithmetic inputs, not compares.  */
    va = gen_ieee_input(ra, fn11, 0);
    vb = gen_ieee_input(rb, fn11, 0);
    helper(cpu_fir[rc], va, vb);
    tcg_temp_free(va);
    tcg_temp_free(vb);

    gen_fp_exc_raise(rc, fn11);
}
1073

    
1074
/* Expand wrappers gen_fadds ... gen_fdivt around gen_ieee_arith3.  */
#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_f, name)(DisasContext *ctx,                 \
                                     int ra, int rb, int rc, int fn11)  \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)
1088

    
1089
/* IEEE comparisons: like gen_ieee_arith3 but comparisons take no
   rounding-mode or flush-to-zero qualifiers, and the inputs are fetched
   as comparison operands (third argument of gen_ieee_input is 1).  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb;

    /* ??? This is wrong: the instruction is not a nop, it still may
       raise exceptions.  */
    if (unlikely(rc == 31)) {
        return;
    }

    gen_fp_exc_clear();

    va = gen_ieee_input(ra, fn11, 1);
    vb = gen_ieee_input(rb, fn11, 1);
    helper(cpu_fir[rc], va, vb);
    tcg_temp_free(va);
    tcg_temp_free(vb);

    gen_fp_exc_raise(rc, fn11);
}
1111

    
1112
/* Expand wrappers gen_fcmptun ... gen_fcmptle around gen_ieee_compare.  */
#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_f, name)(DisasContext *ctx,                 \
                                     int ra, int rb, int rc, int fn11)  \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)
1122

    
1123
/* Expand an 8-bit byte-enable LIT into a 64-bit mask: bit i of LIT set
   selects byte lane i, i.e. 0xff in bits <8*i+7 : 8*i> of the result.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int byte;

    /* Build the mask from the top lane down, shifting in one byte of
       ones or zeroes per enable bit.  */
    for (byte = 7; byte >= 0; --byte) {
        mask <<= 8;
        if (lit & (1 << byte)) {
            mask |= 0xff;
        }
    }
    return mask;
}
1134

    
1135
/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        /* No bytes selected: result is zero.  */
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        /* Low byte only: zero-extend from 8 bits.  */
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        /* Low word: zero-extend from 16 bits.  */
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        /* Low longword: zero-extend from 32 bits.  */
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        /* All bytes selected: plain move.  */
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        /* General case: AND with the expanded byte mask.  */
        tcg_gen_andi_i64 (dest, src, zapnot_mask (lit));
        break;
    }
}
1161

    
1162
/* ZAPNOT: keep only the byte lanes of Ra selected by the low 8 bits of
   the second operand (literal or Rb), zeroing the rest into Rc.  */
static inline void gen_zapnot(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31)) {
        /* Write to $31 is discarded.  */
        return;
    }
    if (unlikely(ra == 31)) {
        /* $31 reads as zero, so the result is zero regardless of mask.  */
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    } else if (islit) {
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], lit);
    } else {
        gen_helper_zapnot(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }
}
1173

    
1174
/* ZAP: clear the byte lanes of Ra selected by the low 8 bits of the
   second operand — the complement of ZAPNOT.  */
static inline void gen_zap(int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31)) {
        /* Write to $31 is discarded.  */
        return;
    }
    if (unlikely(ra == 31)) {
        /* $31 reads as zero, so the result is zero regardless of mask.  */
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    } else if (islit) {
        /* ZAP with mask M == ZAPNOT with mask ~M.  */
        gen_zapnoti(cpu_ir[rc], cpu_ir[ra], ~lit);
    } else {
        gen_helper_zap(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }
}
1185

    
1186

    
1187
/* EXTWH, EXTLH, EXTQH */
static void gen_ext_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            /* Shift left by 64 - 8*(lit & 7), reduced mod 64 so that a
               byte offset of 0 becomes a shift of 0 rather than 64.  */
            lit = (64 - (lit & 7) * 8) & 0x3f;
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        } else {
            /* Compute (64 - 8*(Rb & 7)) mod 64 as (-(Rb & 7)*8) & 63.  */
            TCGv tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        /* Finally keep only the bytes selected by the width mask.  */
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
1211

    
1212
/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            /* Shift right by 8 * (Rb & 7) bits.  */
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        /* Keep only the bytes selected by the width mask.  */
        gen_zapnoti(cpu_ir[rc], cpu_ir[rc], byte_mask);
    }
}
1233

    
1234
/* INSWH, INSLH, INSQH */
static void gen_ins_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31) || (islit && (lit & 7) == 0))
        /* Zero source, or a byte offset of 0: the high part is zero.  */
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            /* Note that we have handled the lit==0 case above.  */
            tcg_gen_shri_i64 (cpu_ir[rc], tmp, 64 - (lit & 7) * 8);
        } else {
            TCGv shift = tcg_temp_new();

            /* If (B & 7) == 0, we need to shift by 64 and leave a zero.
               Do this portably by splitting the shift into two parts:
               shift_count-1 and 1.  Arrange for the -1 by using
               ones-complement instead of twos-complement in the negation:
               ~((B & 7) * 8) & 63.  */

            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_not_i64(shift, shift);
            tcg_gen_andi_i64(shift, shift, 0x3f);

            tcg_gen_shr_i64(cpu_ir[rc], tmp, shift);
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[rc], 1);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
1275

    
1276
/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else {
        TCGv tmp = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           the same number of byte slots as the data and apply the zap
           at the end.  This is equivalent to simply performing the zap
           first and shifting afterward.  */
        gen_zapnoti (tmp, cpu_ir[ra], byte_mask);

        if (islit) {
            tcg_gen_shli_i64(cpu_ir[rc], tmp, (lit & 7) * 8);
        } else {
            /* Shift left by 8 * (Rb & 7) bits.  */
            TCGv shift = tcg_temp_new();
            tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
            tcg_gen_shli_i64(shift, shift, 3);
            tcg_gen_shl_i64(cpu_ir[rc], tmp, shift);
            tcg_temp_free(shift);
        }
        tcg_temp_free(tmp);
    }
}
1305

    
1306
/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        /* Fold the shift and <15:8> extraction into the immediate:
           zap Ra with ~((byte_mask << (lit & 7)) >> 8).  */
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~((B & 7) * 8) & 63.  */

        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        /* Clear the masked bytes of Ra into Rc.  */
        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1343

    
1344
/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(int ra, int rb, int rc, int islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (unlikely(rc == 31))
        return;
    else if (unlikely(ra == 31))
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    else if (islit) {
        /* Constant byte offset: fold into a single immediate zap.  */
        gen_zapnoti (cpu_ir[rc], cpu_ir[ra], ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* Expand the byte-enable mask and slide it left by the byte
           offset in Rb<2:0>, then clear those bytes of Ra.  */
        tcg_gen_andi_i64(shift, cpu_ir[rb], 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1369

    
1370
/* Code to call arith3 helpers.  Expands gen_<name>(ra, rb, rc, islit, lit):
   operand a is Ra or zero ($31); operand b is Rb or the 8-bit literal;
   a write to $31 is discarded and the helper call skipped.  */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
ARITH3(minub8)
ARITH3(minsb8)
ARITH3(minuw4)
ARITH3(minsw4)
ARITH3(maxub8)
ARITH3(maxsb8)
ARITH3(maxuw4)
ARITH3(maxsw4)
ARITH3(perr)
1413

    
1414
/* Two-operand MVI helper dispatch (pack/unpack byte ops): a write to
   $31 is discarded; $31 as source yields a zero result directly.  */
#define MVIOP2(name)                                    \
static inline void glue(gen_, name)(int rb, int rc)     \
{                                                       \
    if (unlikely(rc == 31))                             \
        return;                                         \
    if (unlikely(rb == 31))                             \
        tcg_gen_movi_i64(cpu_ir[rc], 0);                \
    else                                                \
        gen_helper_ ## name (cpu_ir[rc], cpu_ir[rb]);   \
}
MVIOP2(pklb)
MVIOP2(pkwb)
MVIOP2(unpkbl)
MVIOP2(unpkbw)
1428

    
1429
/* Integer compare: set Rc to 1 if "Ra COND (Rb or literal)" else 0.
   $31 as a source reads as zero; a write to $31 is discarded.  */
static void gen_cmp(TCGCond cond, int ra, int rb, int rc,
                    int islit, uint8_t lit)
{
    TCGv va, vb;

    if (unlikely(rc == 31)) {
        return;
    }

    if (ra == 31) {
        va = tcg_const_i64(0);
    } else {
        va = cpu_ir[ra];
    }
    if (islit) {
        vb = tcg_const_i64(lit);
    } else {
        vb = cpu_ir[rb];
    }

    tcg_gen_setcond_i64(cond, cpu_ir[rc], va, vb);

    /* Only free the operands we allocated; the others alias cpu_ir[].  */
    if (ra == 31) {
        tcg_temp_free(va);
    }
    if (islit) {
        tcg_temp_free(vb);
    }
}
1458

    
1459
/* RC/RS: read the current intr_flag into Ra (unless Ra is $31), then
   store SET (0 for RC, 1 for RS) back into intr_flag.  */
static void gen_rx(int ra, int set)
{
    TCGv_i32 tmp;

    /* Read the old flag value first so the swap semantics hold.  */
    if (ra != 31) {
        tcg_gen_ld8u_i64(cpu_ir[ra], cpu_env, offsetof(CPUState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUState, intr_flag));
    tcg_temp_free_i32(tmp);
}
1471

    
1472
/* Translate a CALL_PAL instruction under the emulated OSF/1 PALcode.
   Handled calls return NO_EXIT; unhandled unprivileged calls raise
   EXCP_CALL_PAL, and anything else is an invalid opcode.  */
static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial access
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_unique);
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_mov_i64(cpu_unique, cpu_ir[IR_A0]);
            break;
        default:
            return gen_excp(ctx, EXCP_CALL_PAL, palcode & 0xbf);
        }
        return NO_EXIT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(cpu_ir[IR_A0], cpu_env, offsetof(CPUState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_mov_i64(cpu_sysval, cpu_ir[IR_A0]);
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_sysval);
            break;

        case 0x35: {
            /* SWPIPL */
            TCGv tmp;

            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            tcg_gen_ld8u_i64(cpu_ir[IR_V0], cpu_env, offsetof(CPUState, ps));

            /* But make sure and store only the 3 IPL bits from the user.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[IR_A0], PS_INT_MASK);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUState, ps));
            tcg_temp_free(tmp);
            break;
        }

        case 0x36:
            /* RDPS */
            tcg_gen_ld8u_i64(cpu_ir[IR_V0], cpu_env, offsetof(CPUState, ps));
            break;
        case 0x38:
            /* WRUSP */
            tcg_gen_mov_i64(cpu_usp, cpu_ir[IR_A0]);
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_mov_i64(cpu_ir[IR_V0], cpu_usp);
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(cpu_ir[IR_V0], cpu_env,
                              offsetof(CPUState, cpu_index));
            break;

        default:
            return gen_excp(ctx, EXCP_CALL_PAL, palcode & 0x3f);
        }
        return NO_EXIT;
    }
#endif

    return gen_invalid(ctx);
}
1566

    
1567
#ifndef CONFIG_USER_ONLY
1568

    
1569
/* Flags or'ed into cpu_pr_data's result to mark the storage width of
   the processor-register field within CPUAlphaState.  */
#define PR_BYTE         0x100000
#define PR_LONG         0x200000

/* Map an IPR number to its byte offset within CPUAlphaState, tagged
   with PR_BYTE/PR_LONG for sub-quadword fields.  Returns 0 for unknown
   registers, which are treated as read-zero, write-ignore.  */
static int cpu_pr_data(int pr)
{
    switch (pr) {
    case  0: return offsetof(CPUAlphaState, ps) | PR_BYTE;
    case  1: return offsetof(CPUAlphaState, fen) | PR_BYTE;
    case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case  3: return offsetof(CPUAlphaState, trap_arg0);
    case  4: return offsetof(CPUAlphaState, trap_arg1);
    case  5: return offsetof(CPUAlphaState, trap_arg2);
    case  6: return offsetof(CPUAlphaState, exc_addr);
    case  7: return offsetof(CPUAlphaState, palbr);
    case  8: return offsetof(CPUAlphaState, ptbr);
    case  9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 32 ... 39:
        return offsetof(CPUAlphaState, shadow[pr - 32]);
    case 40 ... 63:
        return offsetof(CPUAlphaState, scratch[pr - 40]);
    }
    return 0;
}
1596

    
1597
/* MFPR: load processor register REGNO into Ra, using the width tag
   from cpu_pr_data to pick the right sized load.  */
static void gen_mfpr(int ra, int regno)
{
    int data = cpu_pr_data(regno);

    /* In our emulated PALcode, these processor registers have no
       side effects from reading.  */
    if (ra == 31) {
        return;
    }

    /* The basic registers are data only, and unknown registers
       are read-zero, write-ignore.  */
    if (data == 0) {
        tcg_gen_movi_i64(cpu_ir[ra], 0);
    } else if (data & PR_BYTE) {
        tcg_gen_ld8u_i64(cpu_ir[ra], cpu_env, data & ~PR_BYTE);
    } else if (data & PR_LONG) {
        tcg_gen_ld32s_i64(cpu_ir[ra], cpu_env, data & ~PR_LONG);
    } else {
        tcg_gen_ld_i64(cpu_ir[ra], cpu_env, data);
    }
}
1619

    
1620
/* MTPR: write Rb (or zero for $31) to processor register REGNO.
   Registers 252-255 are pseudo-registers with side effects (HALT, WAIT,
   TBIS, TBIA); everything else is a plain store sized per cpu_pr_data.
   Returns the TB exit status (WAIT and HALT leave the TB).

   Fix vs. the previous version: the WAIT case used to overwrite TMP
   with a fresh tcg_const_i64(1), leaking the rb==31 zero constant, and
   both the WAIT and HALT early returns skipped the rb==31 cleanup at
   the tail — leaking TCG temporaries at translation time.  */
static ExitStatus gen_mtpr(DisasContext *ctx, int rb, int regno)
{
    TCGv tmp;
    int data;

    /* $31 reads as zero; otherwise TMP aliases cpu_ir[rb].  */
    if (rb == 31) {
        tmp = tcg_const_i64(0);
    } else {
        tmp = cpu_ir[rb];
    }

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia();
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(tmp);
        break;

    case 253:
        /* WAIT */
        {
            /* Use a private temporary for the constant 1 rather than
               clobbering TMP, which may alias cpu_ir[rb] or be the
               rb==31 constant that must be freed below.  */
            TCGv one = tcg_const_i64(1);
            tcg_gen_st32_i64(one, cpu_env, offsetof(CPUState, halted));
            tcg_temp_free(one);
        }
        if (rb == 31) {
            tcg_temp_free(tmp);
        }
        return gen_excp(ctx, EXCP_HLT, 0);

    case 252:
        /* HALT */
        gen_helper_halt(tmp);
        if (rb == 31) {
            tcg_temp_free(tmp);
        }
        return EXIT_PC_STALE;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_BYTE) {
                tcg_gen_st8_i64(tmp, cpu_env, data & ~PR_BYTE);
            } else if (data & PR_LONG) {
                tcg_gen_st32_i64(tmp, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(tmp, cpu_env, data);
            }
        }
        break;
    }

    if (rb == 31) {
        tcg_temp_free(tmp);
    }

    return NO_EXIT;
}
1675
#endif /* !USER_ONLY*/
1676

    
1677
static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
1678
{
1679
    uint32_t palcode;
1680
    int32_t disp21, disp16;
1681
#ifndef CONFIG_USER_ONLY
1682
    int32_t disp12;
1683
#endif
1684
    uint16_t fn11;
1685
    uint8_t opc, ra, rb, rc, fpfn, fn7, islit, real_islit;
1686
    uint8_t lit;
1687
    ExitStatus ret;
1688

    
1689
    /* Decode all instruction fields */
1690
    opc = insn >> 26;
1691
    ra = (insn >> 21) & 0x1F;
1692
    rb = (insn >> 16) & 0x1F;
1693
    rc = insn & 0x1F;
1694
    real_islit = islit = (insn >> 12) & 1;
1695
    if (rb == 31 && !islit) {
1696
        islit = 1;
1697
        lit = 0;
1698
    } else
1699
        lit = (insn >> 13) & 0xFF;
1700
    palcode = insn & 0x03FFFFFF;
1701
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
1702
    disp16 = (int16_t)(insn & 0x0000FFFF);
1703
#ifndef CONFIG_USER_ONLY
1704
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
1705
#endif
1706
    fn11 = (insn >> 5) & 0x000007FF;
1707
    fpfn = fn11 & 0x3F;
1708
    fn7 = (insn >> 5) & 0x0000007F;
1709
    LOG_DISAS("opc %02x ra %2d rb %2d rc %2d disp16 %6d\n",
1710
              opc, ra, rb, rc, disp16);
1711

    
1712
    ret = NO_EXIT;
1713
    switch (opc) {
1714
    case 0x00:
1715
        /* CALL_PAL */
1716
        ret = gen_call_pal(ctx, palcode);
1717
        break;
1718
    case 0x01:
1719
        /* OPC01 */
1720
        goto invalid_opc;
1721
    case 0x02:
1722
        /* OPC02 */
1723
        goto invalid_opc;
1724
    case 0x03:
1725
        /* OPC03 */
1726
        goto invalid_opc;
1727
    case 0x04:
1728
        /* OPC04 */
1729
        goto invalid_opc;
1730
    case 0x05:
1731
        /* OPC05 */
1732
        goto invalid_opc;
1733
    case 0x06:
1734
        /* OPC06 */
1735
        goto invalid_opc;
1736
    case 0x07:
1737
        /* OPC07 */
1738
        goto invalid_opc;
1739
    case 0x08:
1740
        /* LDA */
1741
        if (likely(ra != 31)) {
1742
            if (rb != 31)
1743
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
1744
            else
1745
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
1746
        }
1747
        break;
1748
    case 0x09:
1749
        /* LDAH */
1750
        if (likely(ra != 31)) {
1751
            if (rb != 31)
1752
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
1753
            else
1754
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
1755
        }
1756
        break;
1757
    case 0x0A:
1758
        /* LDBU */
1759
        if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
1760
            gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
1761
            break;
1762
        }
1763
        goto invalid_opc;
1764
    case 0x0B:
1765
        /* LDQ_U */
1766
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
1767
        break;
1768
    case 0x0C:
1769
        /* LDWU */
1770
        if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
1771
            gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
1772
            break;
1773
        }
1774
        goto invalid_opc;
1775
    case 0x0D:
1776
        /* STW */
1777
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
1778
        break;
1779
    case 0x0E:
1780
        /* STB */
1781
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
1782
        break;
1783
    case 0x0F:
1784
        /* STQ_U */
1785
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
1786
        break;
1787
    case 0x10:
1788
        switch (fn7) {
1789
        case 0x00:
1790
            /* ADDL */
1791
            if (likely(rc != 31)) {
1792
                if (ra != 31) {
1793
                    if (islit) {
1794
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1795
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1796
                    } else {
1797
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1798
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1799
                    }
1800
                } else {
1801
                    if (islit)
1802
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1803
                    else
1804
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1805
                }
1806
            }
1807
            break;
1808
        case 0x02:
1809
            /* S4ADDL */
1810
            if (likely(rc != 31)) {
1811
                if (ra != 31) {
1812
                    TCGv tmp = tcg_temp_new();
1813
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1814
                    if (islit)
1815
                        tcg_gen_addi_i64(tmp, tmp, lit);
1816
                    else
1817
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
1818
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1819
                    tcg_temp_free(tmp);
1820
                } else {
1821
                    if (islit)
1822
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1823
                    else
1824
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1825
                }
1826
            }
1827
            break;
1828
        case 0x09:
1829
            /* SUBL */
1830
            if (likely(rc != 31)) {
1831
                if (ra != 31) {
1832
                    if (islit)
1833
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1834
                    else
1835
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1836
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1837
                } else {
1838
                    if (islit)
1839
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1840
                    else {
1841
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1842
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1843
                }
1844
            }
1845
            break;
1846
        case 0x0B:
1847
            /* S4SUBL */
1848
            if (likely(rc != 31)) {
1849
                if (ra != 31) {
1850
                    TCGv tmp = tcg_temp_new();
1851
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1852
                    if (islit)
1853
                        tcg_gen_subi_i64(tmp, tmp, lit);
1854
                    else
1855
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
1856
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1857
                    tcg_temp_free(tmp);
1858
                } else {
1859
                    if (islit)
1860
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1861
                    else {
1862
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1863
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1864
                    }
1865
                }
1866
            }
1867
            break;
1868
        case 0x0F:
1869
            /* CMPBGE */
1870
            gen_cmpbge(ra, rb, rc, islit, lit);
1871
            break;
1872
        case 0x12:
1873
            /* S8ADDL */
1874
            if (likely(rc != 31)) {
1875
                if (ra != 31) {
1876
                    TCGv tmp = tcg_temp_new();
1877
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1878
                    if (islit)
1879
                        tcg_gen_addi_i64(tmp, tmp, lit);
1880
                    else
1881
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
1882
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1883
                    tcg_temp_free(tmp);
1884
                } else {
1885
                    if (islit)
1886
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1887
                    else
1888
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
1889
                }
1890
            }
1891
            break;
1892
        case 0x1B:
1893
            /* S8SUBL */
1894
            if (likely(rc != 31)) {
1895
                if (ra != 31) {
1896
                    TCGv tmp = tcg_temp_new();
1897
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1898
                    if (islit)
1899
                        tcg_gen_subi_i64(tmp, tmp, lit);
1900
                    else
1901
                       tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
1902
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
1903
                    tcg_temp_free(tmp);
1904
                } else {
1905
                    if (islit)
1906
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1907
                    else
1908
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1909
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1910
                    }
1911
                }
1912
            }
1913
            break;
1914
        case 0x1D:
1915
            /* CMPULT */
1916
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
1917
            break;
1918
        case 0x20:
1919
            /* ADDQ */
1920
            if (likely(rc != 31)) {
1921
                if (ra != 31) {
1922
                    if (islit)
1923
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1924
                    else
1925
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1926
                } else {
1927
                    if (islit)
1928
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1929
                    else
1930
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1931
                }
1932
            }
1933
            break;
1934
        case 0x22:
1935
            /* S4ADDQ */
1936
            if (likely(rc != 31)) {
1937
                if (ra != 31) {
1938
                    TCGv tmp = tcg_temp_new();
1939
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1940
                    if (islit)
1941
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
1942
                    else
1943
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1944
                    tcg_temp_free(tmp);
1945
                } else {
1946
                    if (islit)
1947
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1948
                    else
1949
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1950
                }
1951
            }
1952
            break;
1953
        case 0x29:
1954
            /* SUBQ */
1955
            if (likely(rc != 31)) {
1956
                if (ra != 31) {
1957
                    if (islit)
1958
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1959
                    else
1960
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1961
                } else {
1962
                    if (islit)
1963
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1964
                    else
1965
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1966
                }
1967
            }
1968
            break;
1969
        case 0x2B:
1970
            /* S4SUBQ */
1971
            if (likely(rc != 31)) {
1972
                if (ra != 31) {
1973
                    TCGv tmp = tcg_temp_new();
1974
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
1975
                    if (islit)
1976
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1977
                    else
1978
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1979
                    tcg_temp_free(tmp);
1980
                } else {
1981
                    if (islit)
1982
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1983
                    else
1984
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1985
                }
1986
            }
1987
            break;
1988
        case 0x2D:
1989
            /* CMPEQ */
1990
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
1991
            break;
1992
        case 0x32:
1993
            /* S8ADDQ */
1994
            if (likely(rc != 31)) {
1995
                if (ra != 31) {
1996
                    TCGv tmp = tcg_temp_new();
1997
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1998
                    if (islit)
1999
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
2000
                    else
2001
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
2002
                    tcg_temp_free(tmp);
2003
                } else {
2004
                    if (islit)
2005
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
2006
                    else
2007
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
2008
                }
2009
            }
2010
            break;
2011
        case 0x3B:
2012
            /* S8SUBQ */
2013
            if (likely(rc != 31)) {
2014
                if (ra != 31) {
2015
                    TCGv tmp = tcg_temp_new();
2016
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
2017
                    if (islit)
2018
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
2019
                    else
2020
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
2021
                    tcg_temp_free(tmp);
2022
                } else {
2023
                    if (islit)
2024
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
2025
                    else
2026
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
2027
                }
2028
            }
2029
            break;
2030
        case 0x3D:
2031
            /* CMPULE */
2032
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
2033
            break;
2034
        case 0x40:
2035
            /* ADDL/V */
2036
            gen_addlv(ra, rb, rc, islit, lit);
2037
            break;
2038
        case 0x49:
2039
            /* SUBL/V */
2040
            gen_sublv(ra, rb, rc, islit, lit);
2041
            break;
2042
        case 0x4D:
2043
            /* CMPLT */
2044
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
2045
            break;
2046
        case 0x60:
2047
            /* ADDQ/V */
2048
            gen_addqv(ra, rb, rc, islit, lit);
2049
            break;
2050
        case 0x69:
2051
            /* SUBQ/V */
2052
            gen_subqv(ra, rb, rc, islit, lit);
2053
            break;
2054
        case 0x6D:
2055
            /* CMPLE */
2056
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
2057
            break;
2058
        default:
2059
            goto invalid_opc;
2060
        }
2061
        break;
2062
    case 0x11:
2063
        switch (fn7) {
2064
        case 0x00:
2065
            /* AND */
2066
            if (likely(rc != 31)) {
2067
                if (ra == 31)
2068
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2069
                else if (islit)
2070
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
2071
                else
2072
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2073
            }
2074
            break;
2075
        case 0x08:
2076
            /* BIC */
2077
            if (likely(rc != 31)) {
2078
                if (ra != 31) {
2079
                    if (islit)
2080
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
2081
                    else
2082
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2083
                } else
2084
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2085
            }
2086
            break;
2087
        case 0x14:
2088
            /* CMOVLBS */
2089
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
2090
            break;
2091
        case 0x16:
2092
            /* CMOVLBC */
2093
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
2094
            break;
2095
        case 0x20:
2096
            /* BIS */
2097
            if (likely(rc != 31)) {
2098
                if (ra != 31) {
2099
                    if (islit)
2100
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
2101
                    else
2102
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2103
                } else {
2104
                    if (islit)
2105
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
2106
                    else
2107
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
2108
                }
2109
            }
2110
            break;
2111
        case 0x24:
2112
            /* CMOVEQ */
2113
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
2114
            break;
2115
        case 0x26:
2116
            /* CMOVNE */
2117
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
2118
            break;
2119
        case 0x28:
2120
            /* ORNOT */
2121
            if (likely(rc != 31)) {
2122
                if (ra != 31) {
2123
                    if (islit)
2124
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
2125
                    else
2126
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2127
                } else {
2128
                    if (islit)
2129
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
2130
                    else
2131
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
2132
                }
2133
            }
2134
            break;
2135
        case 0x40:
2136
            /* XOR */
2137
            if (likely(rc != 31)) {
2138
                if (ra != 31) {
2139
                    if (islit)
2140
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
2141
                    else
2142
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2143
                } else {
2144
                    if (islit)
2145
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
2146
                    else
2147
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
2148
                }
2149
            }
2150
            break;
2151
        case 0x44:
2152
            /* CMOVLT */
2153
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
2154
            break;
2155
        case 0x46:
2156
            /* CMOVGE */
2157
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
2158
            break;
2159
        case 0x48:
2160
            /* EQV */
2161
            if (likely(rc != 31)) {
2162
                if (ra != 31) {
2163
                    if (islit)
2164
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
2165
                    else
2166
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2167
                } else {
2168
                    if (islit)
2169
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
2170
                    else
2171
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
2172
                }
2173
            }
2174
            break;
2175
        case 0x61:
2176
            /* AMASK */
2177
            if (likely(rc != 31)) {
2178
                uint64_t amask = ctx->tb->flags >> TB_FLAGS_AMASK_SHIFT;
2179

    
2180
                if (islit) {
2181
                    tcg_gen_movi_i64(cpu_ir[rc], lit & ~amask);
2182
                } else {
2183
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rb], ~amask);
2184
                }
2185
            }
2186
            break;
2187
        case 0x64:
2188
            /* CMOVLE */
2189
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
2190
            break;
2191
        case 0x66:
2192
            /* CMOVGT */
2193
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
2194
            break;
2195
        case 0x6C:
2196
            /* IMPLVER */
2197
            if (rc != 31)
2198
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
2199
            break;
2200
        default:
2201
            goto invalid_opc;
2202
        }
2203
        break;
2204
    case 0x12:
2205
        switch (fn7) {
2206
        case 0x02:
2207
            /* MSKBL */
2208
            gen_msk_l(ra, rb, rc, islit, lit, 0x01);
2209
            break;
2210
        case 0x06:
2211
            /* EXTBL */
2212
            gen_ext_l(ra, rb, rc, islit, lit, 0x01);
2213
            break;
2214
        case 0x0B:
2215
            /* INSBL */
2216
            gen_ins_l(ra, rb, rc, islit, lit, 0x01);
2217
            break;
2218
        case 0x12:
2219
            /* MSKWL */
2220
            gen_msk_l(ra, rb, rc, islit, lit, 0x03);
2221
            break;
2222
        case 0x16:
2223
            /* EXTWL */
2224
            gen_ext_l(ra, rb, rc, islit, lit, 0x03);
2225
            break;
2226
        case 0x1B:
2227
            /* INSWL */
2228
            gen_ins_l(ra, rb, rc, islit, lit, 0x03);
2229
            break;
2230
        case 0x22:
2231
            /* MSKLL */
2232
            gen_msk_l(ra, rb, rc, islit, lit, 0x0f);
2233
            break;
2234
        case 0x26:
2235
            /* EXTLL */
2236
            gen_ext_l(ra, rb, rc, islit, lit, 0x0f);
2237
            break;
2238
        case 0x2B:
2239
            /* INSLL */
2240
            gen_ins_l(ra, rb, rc, islit, lit, 0x0f);
2241
            break;
2242
        case 0x30:
2243
            /* ZAP */
2244
            gen_zap(ra, rb, rc, islit, lit);
2245
            break;
2246
        case 0x31:
2247
            /* ZAPNOT */
2248
            gen_zapnot(ra, rb, rc, islit, lit);
2249
            break;
2250
        case 0x32:
2251
            /* MSKQL */
2252
            gen_msk_l(ra, rb, rc, islit, lit, 0xff);
2253
            break;
2254
        case 0x34:
2255
            /* SRL */
2256
            if (likely(rc != 31)) {
2257
                if (ra != 31) {
2258
                    if (islit)
2259
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
2260
                    else {
2261
                        TCGv shift = tcg_temp_new();
2262
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
2263
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
2264
                        tcg_temp_free(shift);
2265
                    }
2266
                } else
2267
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2268
            }
2269
            break;
2270
        case 0x36:
2271
            /* EXTQL */
2272
            gen_ext_l(ra, rb, rc, islit, lit, 0xff);
2273
            break;
2274
        case 0x39:
2275
            /* SLL */
2276
            if (likely(rc != 31)) {
2277
                if (ra != 31) {
2278
                    if (islit)
2279
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
2280
                    else {
2281
                        TCGv shift = tcg_temp_new();
2282
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
2283
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
2284
                        tcg_temp_free(shift);
2285
                    }
2286
                } else
2287
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2288
            }
2289
            break;
2290
        case 0x3B:
2291
            /* INSQL */
2292
            gen_ins_l(ra, rb, rc, islit, lit, 0xff);
2293
            break;
2294
        case 0x3C:
2295
            /* SRA */
2296
            if (likely(rc != 31)) {
2297
                if (ra != 31) {
2298
                    if (islit)
2299
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
2300
                    else {
2301
                        TCGv shift = tcg_temp_new();
2302
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
2303
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
2304
                        tcg_temp_free(shift);
2305
                    }
2306
                } else
2307
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2308
            }
2309
            break;
2310
        case 0x52:
2311
            /* MSKWH */
2312
            gen_msk_h(ra, rb, rc, islit, lit, 0x03);
2313
            break;
2314
        case 0x57:
2315
            /* INSWH */
2316
            gen_ins_h(ra, rb, rc, islit, lit, 0x03);
2317
            break;
2318
        case 0x5A:
2319
            /* EXTWH */
2320
            gen_ext_h(ra, rb, rc, islit, lit, 0x03);
2321
            break;
2322
        case 0x62:
2323
            /* MSKLH */
2324
            gen_msk_h(ra, rb, rc, islit, lit, 0x0f);
2325
            break;
2326
        case 0x67:
2327
            /* INSLH */
2328
            gen_ins_h(ra, rb, rc, islit, lit, 0x0f);
2329
            break;
2330
        case 0x6A:
2331
            /* EXTLH */
2332
            gen_ext_h(ra, rb, rc, islit, lit, 0x0f);
2333
            break;
2334
        case 0x72:
2335
            /* MSKQH */
2336
            gen_msk_h(ra, rb, rc, islit, lit, 0xff);
2337
            break;
2338
        case 0x77:
2339
            /* INSQH */
2340
            gen_ins_h(ra, rb, rc, islit, lit, 0xff);
2341
            break;
2342
        case 0x7A:
2343
            /* EXTQH */
2344
            gen_ext_h(ra, rb, rc, islit, lit, 0xff);
2345
            break;
2346
        default:
2347
            goto invalid_opc;
2348
        }
2349
        break;
2350
    case 0x13:
2351
        switch (fn7) {
2352
        case 0x00:
2353
            /* MULL */
2354
            if (likely(rc != 31)) {
2355
                if (ra == 31)
2356
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2357
                else {
2358
                    if (islit)
2359
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
2360
                    else
2361
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2362
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
2363
                }
2364
            }
2365
            break;
2366
        case 0x20:
2367
            /* MULQ */
2368
            if (likely(rc != 31)) {
2369
                if (ra == 31)
2370
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
2371
                else if (islit)
2372
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
2373
                else
2374
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
2375
            }
2376
            break;
2377
        case 0x30:
2378
            /* UMULH */
2379
            gen_umulh(ra, rb, rc, islit, lit);
2380
            break;
2381
        case 0x40:
2382
            /* MULL/V */
2383
            gen_mullv(ra, rb, rc, islit, lit);
2384
            break;
2385
        case 0x60:
2386
            /* MULQ/V */
2387
            gen_mulqv(ra, rb, rc, islit, lit);
2388
            break;
2389
        default:
2390
            goto invalid_opc;
2391
        }
2392
        break;
2393
    case 0x14:
2394
        switch (fpfn) { /* fn11 & 0x3F */
2395
        case 0x04:
2396
            /* ITOFS */
2397
            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
2398
                goto invalid_opc;
2399
            }
2400
            if (likely(rc != 31)) {
2401
                if (ra != 31) {
2402
                    TCGv_i32 tmp = tcg_temp_new_i32();
2403
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
2404
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
2405
                    tcg_temp_free_i32(tmp);
2406
                } else
2407
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
2408
            }
2409
            break;
2410
        case 0x0A:
2411
            /* SQRTF */
2412
            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
2413
                gen_fsqrtf(rb, rc);
2414
                break;
2415
            }
2416
            goto invalid_opc;
2417
        case 0x0B:
2418
            /* SQRTS */
2419
            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
2420
                gen_fsqrts(ctx, rb, rc, fn11);
2421
                break;
2422
            }
2423
            goto invalid_opc;
2424
        case 0x14:
2425
            /* ITOFF */
2426
            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
2427
                goto invalid_opc;
2428
            }
2429
            if (likely(rc != 31)) {
2430
                if (ra != 31) {
2431
                    TCGv_i32 tmp = tcg_temp_new_i32();
2432
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
2433
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
2434
                    tcg_temp_free_i32(tmp);
2435
                } else
2436
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
2437
            }
2438
            break;
2439
        case 0x24:
2440
            /* ITOFT */
2441
            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
2442
                goto invalid_opc;
2443
            }
2444
            if (likely(rc != 31)) {
2445
                if (ra != 31)
2446
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
2447
                else
2448
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
2449
            }
2450
            break;
2451
        case 0x2A:
2452
            /* SQRTG */
2453
            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
2454
                gen_fsqrtg(rb, rc);
2455
                break;
2456
            }
2457
            goto invalid_opc;
2458
        case 0x02B:
2459
            /* SQRTT */
2460
            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
2461
                gen_fsqrtt(ctx, rb, rc, fn11);
2462
                break;
2463
            }
2464
            goto invalid_opc;
2465
        default:
2466
            goto invalid_opc;
2467
        }
2468
        break;
2469
    case 0x15:
2470
        /* VAX floating point */
2471
        /* XXX: rounding mode and trap are ignored (!) */
2472
        switch (fpfn) { /* fn11 & 0x3F */
2473
        case 0x00:
2474
            /* ADDF */
2475
            gen_faddf(ra, rb, rc);
2476
            break;
2477
        case 0x01:
2478
            /* SUBF */
2479
            gen_fsubf(ra, rb, rc);
2480
            break;
2481
        case 0x02:
2482
            /* MULF */
2483
            gen_fmulf(ra, rb, rc);
2484
            break;
2485
        case 0x03:
2486
            /* DIVF */
2487
            gen_fdivf(ra, rb, rc);
2488
            break;
2489
        case 0x1E:
2490
            /* CVTDG */
2491
#if 0 // TODO
2492
            gen_fcvtdg(rb, rc);
2493
#else
2494
            goto invalid_opc;
2495
#endif
2496
            break;
2497
        case 0x20:
2498
            /* ADDG */
2499
            gen_faddg(ra, rb, rc);
2500
            break;
2501
        case 0x21:
2502
            /* SUBG */
2503
            gen_fsubg(ra, rb, rc);
2504
            break;
2505
        case 0x22:
2506
            /* MULG */
2507
            gen_fmulg(ra, rb, rc);
2508
            break;
2509
        case 0x23:
2510
            /* DIVG */
2511
            gen_fdivg(ra, rb, rc);
2512
            break;
2513
        case 0x25:
2514
            /* CMPGEQ */
2515
            gen_fcmpgeq(ra, rb, rc);
2516
            break;
2517
        case 0x26:
2518
            /* CMPGLT */
2519
            gen_fcmpglt(ra, rb, rc);
2520
            break;
2521
        case 0x27:
2522
            /* CMPGLE */
2523
            gen_fcmpgle(ra, rb, rc);
2524
            break;
2525
        case 0x2C:
2526
            /* CVTGF */
2527
            gen_fcvtgf(rb, rc);
2528
            break;
2529
        case 0x2D:
2530
            /* CVTGD */
2531
#if 0 // TODO
2532
            gen_fcvtgd(rb, rc);
2533
#else
2534
            goto invalid_opc;
2535
#endif
2536
            break;
2537
        case 0x2F:
2538
            /* CVTGQ */
2539
            gen_fcvtgq(rb, rc);
2540
            break;
2541
        case 0x3C:
2542
            /* CVTQF */
2543
            gen_fcvtqf(rb, rc);
2544
            break;
2545
        case 0x3E:
2546
            /* CVTQG */
2547
            gen_fcvtqg(rb, rc);
2548
            break;
2549
        default:
2550
            goto invalid_opc;
2551
        }
2552
        break;
2553
    case 0x16:
2554
        /* IEEE floating-point */
2555
        switch (fpfn) { /* fn11 & 0x3F */
2556
        case 0x00:
2557
            /* ADDS */
2558
            gen_fadds(ctx, ra, rb, rc, fn11);
2559
            break;
2560
        case 0x01:
2561
            /* SUBS */
2562
            gen_fsubs(ctx, ra, rb, rc, fn11);
2563
            break;
2564
        case 0x02:
2565
            /* MULS */
2566
            gen_fmuls(ctx, ra, rb, rc, fn11);
2567
            break;
2568
        case 0x03:
2569
            /* DIVS */
2570
            gen_fdivs(ctx, ra, rb, rc, fn11);
2571
            break;
2572
        case 0x20:
2573
            /* ADDT */
2574
            gen_faddt(ctx, ra, rb, rc, fn11);
2575
            break;
2576
        case 0x21:
2577
            /* SUBT */
2578
            gen_fsubt(ctx, ra, rb, rc, fn11);
2579
            break;
2580
        case 0x22:
2581
            /* MULT */
2582
            gen_fmult(ctx, ra, rb, rc, fn11);
2583
            break;
2584
        case 0x23:
2585
            /* DIVT */
2586
            gen_fdivt(ctx, ra, rb, rc, fn11);
2587
            break;
2588
        case 0x24:
2589
            /* CMPTUN */
2590
            gen_fcmptun(ctx, ra, rb, rc, fn11);
2591
            break;
2592
        case 0x25:
2593
            /* CMPTEQ */
2594
            gen_fcmpteq(ctx, ra, rb, rc, fn11);
2595
            break;
2596
        case 0x26:
2597
            /* CMPTLT */
2598
            gen_fcmptlt(ctx, ra, rb, rc, fn11);
2599
            break;
2600
        case 0x27:
2601
            /* CMPTLE */
2602
            gen_fcmptle(ctx, ra, rb, rc, fn11);
2603
            break;
2604
        case 0x2C:
2605
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
2606
                /* CVTST */
2607
                gen_fcvtst(ctx, rb, rc, fn11);
2608
            } else {
2609
                /* CVTTS */
2610
                gen_fcvtts(ctx, rb, rc, fn11);
2611
            }
2612
            break;
2613
        case 0x2F:
2614
            /* CVTTQ */
2615
            gen_fcvttq(ctx, rb, rc, fn11);
2616
            break;
2617
        case 0x3C:
2618
            /* CVTQS */
2619
            gen_fcvtqs(ctx, rb, rc, fn11);
2620
            break;
2621
        case 0x3E:
2622
            /* CVTQT */
2623
            gen_fcvtqt(ctx, rb, rc, fn11);
2624
            break;
2625
        default:
2626
            goto invalid_opc;
2627
        }
2628
        break;
2629
    case 0x17:
2630
        switch (fn11) {
2631
        case 0x010:
2632
            /* CVTLQ */
2633
            gen_fcvtlq(rb, rc);
2634
            break;
2635
        case 0x020:
2636
            if (likely(rc != 31)) {
2637
                if (ra == rb) {
2638
                    /* FMOV */
2639
                    if (ra == 31)
2640
                        tcg_gen_movi_i64(cpu_fir[rc], 0);
2641
                    else
2642
                        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
2643
                } else {
2644
                    /* CPYS */
2645
                    gen_fcpys(ra, rb, rc);
2646
                }
2647
            }
2648
            break;
2649
        case 0x021:
2650
            /* CPYSN */
2651
            gen_fcpysn(ra, rb, rc);
2652
            break;
2653
        case 0x022:
2654
            /* CPYSE */
2655
            gen_fcpyse(ra, rb, rc);
2656
            break;
2657
        case 0x024:
2658
            /* MT_FPCR */
2659
            if (likely(ra != 31))
2660
                gen_helper_store_fpcr(cpu_fir[ra]);
2661
            else {
2662
                TCGv tmp = tcg_const_i64(0);
2663
                gen_helper_store_fpcr(tmp);
2664
                tcg_temp_free(tmp);
2665
            }
2666
            break;
2667
        case 0x025:
2668
            /* MF_FPCR */
2669
            if (likely(ra != 31))
2670
                gen_helper_load_fpcr(cpu_fir[ra]);
2671
            break;
2672
        case 0x02A:
2673
            /* FCMOVEQ */
2674
            gen_fcmov(TCG_COND_EQ, ra, rb, rc);
2675
            break;
2676
        case 0x02B:
2677
            /* FCMOVNE */
2678
            gen_fcmov(TCG_COND_NE, ra, rb, rc);
2679
            break;
2680
        case 0x02C:
2681
            /* FCMOVLT */
2682
            gen_fcmov(TCG_COND_LT, ra, rb, rc);
2683
            break;
2684
        case 0x02D:
2685
            /* FCMOVGE */
2686
            gen_fcmov(TCG_COND_GE, ra, rb, rc);
2687
            break;
2688
        case 0x02E:
2689
            /* FCMOVLE */
2690
            gen_fcmov(TCG_COND_LE, ra, rb, rc);
2691
            break;
2692
        case 0x02F:
2693
            /* FCMOVGT */
2694
            gen_fcmov(TCG_COND_GT, ra, rb, rc);
2695
            break;
2696
        case 0x030:
2697
            /* CVTQL */
2698
            gen_fcvtql(rb, rc);
2699
            break;
2700
        case 0x130:
2701
            /* CVTQL/V */
2702
        case 0x530:
2703
            /* CVTQL/SV */
2704
            /* ??? I'm pretty sure there's nothing that /sv needs to do that
2705
               /v doesn't do.  The only thing I can think is that /sv is a
2706
               valid instruction merely for completeness in the ISA.  */
2707
            gen_fcvtql_v(ctx, rb, rc);
2708
            break;
2709
        default:
2710
            goto invalid_opc;
2711
        }
2712
        break;
2713
    case 0x18:
2714
        switch ((uint16_t)disp16) {
2715
        case 0x0000:
2716
            /* TRAPB */
2717
            /* No-op.  */
2718
            break;
2719
        case 0x0400:
2720
            /* EXCB */
2721
            /* No-op.  */
2722
            break;
2723
        case 0x4000:
2724
            /* MB */
2725
            /* No-op */
2726
            break;
2727
        case 0x4400:
2728
            /* WMB */
2729
            /* No-op */
2730
            break;
2731
        case 0x8000:
2732
            /* FETCH */
2733
            /* No-op */
2734
            break;
2735
        case 0xA000:
2736
            /* FETCH_M */
2737
            /* No-op */
2738
            break;
2739
        case 0xC000:
2740
            /* RPCC */
2741
            if (ra != 31) {
2742
                if (use_icount) {
2743
                    gen_io_start();
2744
                    gen_helper_load_pcc(cpu_ir[ra]);
2745
                    gen_io_end();
2746
                    ret = EXIT_PC_STALE;
2747
                } else {
2748
                    gen_helper_load_pcc(cpu_ir[ra]);
2749
                }
2750
            }
2751
            break;
2752
        case 0xE000:
2753
            /* RC */
2754
            gen_rx(ra, 0);
2755
            break;
2756
        case 0xE800:
2757
            /* ECB */
2758
            break;
2759
        case 0xF000:
2760
            /* RS */
2761
            gen_rx(ra, 1);
2762
            break;
2763
        case 0xF800:
2764
            /* WH64 */
2765
            /* No-op */
2766
            break;
2767
        default:
2768
            goto invalid_opc;
2769
        }
2770
        break;
2771
    case 0x19:
2772
        /* HW_MFPR (PALcode) */
2773
#ifndef CONFIG_USER_ONLY
2774
        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
2775
            gen_mfpr(ra, insn & 0xffff);
2776
            break;
2777
        }
2778
#endif
2779
        goto invalid_opc;
2780
    case 0x1A:
2781
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
2782
           prediction stack action, which of course we don't implement.  */
2783
        if (rb != 31) {
2784
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
2785
        } else {
2786
            tcg_gen_movi_i64(cpu_pc, 0);
2787
        }
2788
        if (ra != 31) {
2789
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2790
        }
2791
        ret = EXIT_PC_UPDATED;
2792
        break;
2793
    case 0x1B:
2794
        /* HW_LD (PALcode) */
2795
#ifndef CONFIG_USER_ONLY
2796
        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
2797
            TCGv addr;
2798

    
2799
            if (ra == 31) {
2800
                break;
2801
            }
2802

    
2803
            addr = tcg_temp_new();
2804
            if (rb != 31)
2805
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2806
            else
2807
                tcg_gen_movi_i64(addr, disp12);
2808
            switch ((insn >> 12) & 0xF) {
2809
            case 0x0:
2810
                /* Longword physical access (hw_ldl/p) */
2811
                gen_helper_ldl_phys(cpu_ir[ra], addr);
2812
                break;
2813
            case 0x1:
2814
                /* Quadword physical access (hw_ldq/p) */
2815
                gen_helper_ldq_phys(cpu_ir[ra], addr);
2816
                break;
2817
            case 0x2:
2818
                /* Longword physical access with lock (hw_ldl_l/p) */
2819
                gen_helper_ldl_l_phys(cpu_ir[ra], addr);
2820
                break;
2821
            case 0x3:
2822
                /* Quadword physical access with lock (hw_ldq_l/p) */
2823
                gen_helper_ldq_l_phys(cpu_ir[ra], addr);
2824
                break;
2825
            case 0x4:
2826
                /* Longword virtual PTE fetch (hw_ldl/v) */
2827
                goto invalid_opc;
2828
            case 0x5:
2829
                /* Quadword virtual PTE fetch (hw_ldq/v) */
2830
                goto invalid_opc;
2831
                break;
2832
            case 0x6:
2833
                /* Incpu_ir[ra]id */
2834
                goto invalid_opc;
2835
            case 0x7:
2836
                /* Incpu_ir[ra]id */
2837
                goto invalid_opc;
2838
            case 0x8:
2839
                /* Longword virtual access (hw_ldl) */
2840
                goto invalid_opc;
2841
            case 0x9:
2842
                /* Quadword virtual access (hw_ldq) */
2843
                goto invalid_opc;
2844
            case 0xA:
2845
                /* Longword virtual access with protection check (hw_ldl/w) */
2846
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, MMU_KERNEL_IDX);
2847
                break;
2848
            case 0xB:
2849
                /* Quadword virtual access with protection check (hw_ldq/w) */
2850
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, MMU_KERNEL_IDX);
2851
                break;
2852
            case 0xC:
2853
                /* Longword virtual access with alt access mode (hw_ldl/a)*/
2854
                goto invalid_opc;
2855
            case 0xD:
2856
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
2857
                goto invalid_opc;
2858
            case 0xE:
2859
                /* Longword virtual access with alternate access mode and
2860
                   protection checks (hw_ldl/wa) */
2861
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, MMU_USER_IDX);
2862
                break;
2863
            case 0xF:
2864
                /* Quadword virtual access with alternate access mode and
2865
                   protection checks (hw_ldq/wa) */
2866
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, MMU_USER_IDX);
2867
                break;
2868
            }
2869
            tcg_temp_free(addr);
2870
            break;
2871
        }
2872
#endif
2873
        goto invalid_opc;
2874
    case 0x1C:
2875
        switch (fn7) {
2876
        case 0x00:
2877
            /* SEXTB */
2878
            if ((ctx->tb->flags & TB_FLAGS_AMASK_BWX) == 0) {
2879
                goto invalid_opc;
2880
            }
2881
            if (likely(rc != 31)) {
2882
                if (islit)
2883
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
2884
                else
2885
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
2886
            }
2887
            break;
2888
        case 0x01:
2889
            /* SEXTW */
2890
            if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
2891
                if (likely(rc != 31)) {
2892
                    if (islit) {
2893
                        tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
2894
                    } else {
2895
                        tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
2896
                    }
2897
                }
2898
                break;
2899
            }
2900
            goto invalid_opc;
2901
        case 0x30:
2902
            /* CTPOP */
2903
            if (ctx->tb->flags & TB_FLAGS_AMASK_CIX) {
2904
                if (likely(rc != 31)) {
2905
                    if (islit) {
2906
                        tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
2907
                    } else {
2908
                        gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
2909
                    }
2910
                }
2911
                break;
2912
            }
2913
            goto invalid_opc;
2914
        case 0x31:
2915
            /* PERR */
2916
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2917
                gen_perr(ra, rb, rc, islit, lit);
2918
                break;
2919
            }
2920
            goto invalid_opc;
2921
        case 0x32:
2922
            /* CTLZ */
2923
            if (ctx->tb->flags & TB_FLAGS_AMASK_CIX) {
2924
                if (likely(rc != 31)) {
2925
                    if (islit) {
2926
                        tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
2927
                    } else {
2928
                        gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
2929
                    }
2930
                }
2931
                break;
2932
            }
2933
            goto invalid_opc;
2934
        case 0x33:
2935
            /* CTTZ */
2936
            if (ctx->tb->flags & TB_FLAGS_AMASK_CIX) {
2937
                if (likely(rc != 31)) {
2938
                    if (islit) {
2939
                        tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
2940
                    } else {
2941
                        gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
2942
                    }
2943
                }
2944
                break;
2945
            }
2946
            goto invalid_opc;
2947
        case 0x34:
2948
            /* UNPKBW */
2949
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2950
                if (real_islit || ra != 31) {
2951
                    goto invalid_opc;
2952
                }
2953
                gen_unpkbw(rb, rc);
2954
                break;
2955
            }
2956
            goto invalid_opc;
2957
        case 0x35:
2958
            /* UNPKBL */
2959
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2960
                if (real_islit || ra != 31) {
2961
                    goto invalid_opc;
2962
                }
2963
                gen_unpkbl(rb, rc);
2964
                break;
2965
            }
2966
            goto invalid_opc;
2967
        case 0x36:
2968
            /* PKWB */
2969
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2970
                if (real_islit || ra != 31) {
2971
                    goto invalid_opc;
2972
                }
2973
                gen_pkwb(rb, rc);
2974
                break;
2975
            }
2976
            goto invalid_opc;
2977
        case 0x37:
2978
            /* PKLB */
2979
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2980
                if (real_islit || ra != 31) {
2981
                    goto invalid_opc;
2982
                }
2983
                gen_pklb(rb, rc);
2984
                break;
2985
            }
2986
            goto invalid_opc;
2987
        case 0x38:
2988
            /* MINSB8 */
2989
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2990
                gen_minsb8(ra, rb, rc, islit, lit);
2991
                break;
2992
            }
2993
            goto invalid_opc;
2994
        case 0x39:
2995
            /* MINSW4 */
2996
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
2997
                gen_minsw4(ra, rb, rc, islit, lit);
2998
                break;
2999
            }
3000
            goto invalid_opc;
3001
        case 0x3A:
3002
            /* MINUB8 */
3003
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
3004
                gen_minub8(ra, rb, rc, islit, lit);
3005
                break;
3006
            }
3007
            goto invalid_opc;
3008
        case 0x3B:
3009
            /* MINUW4 */
3010
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
3011
                gen_minuw4(ra, rb, rc, islit, lit);
3012
                break;
3013
            }
3014
            goto invalid_opc;
3015
        case 0x3C:
3016
            /* MAXUB8 */
3017
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
3018
                gen_maxub8(ra, rb, rc, islit, lit);
3019
                break;
3020
            }
3021
            goto invalid_opc;
3022
        case 0x3D:
3023
            /* MAXUW4 */
3024
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
3025
                gen_maxuw4(ra, rb, rc, islit, lit);
3026
                break;
3027
            }
3028
            goto invalid_opc;
3029
        case 0x3E:
3030
            /* MAXSB8 */
3031
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
3032
                gen_maxsb8(ra, rb, rc, islit, lit);
3033
                break;
3034
            }
3035
            goto invalid_opc;
3036
        case 0x3F:
3037
            /* MAXSW4 */
3038
            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
3039
                gen_maxsw4(ra, rb, rc, islit, lit);
3040
                break;
3041
            }
3042
            goto invalid_opc;
3043
        case 0x70:
3044
            /* FTOIT */
3045
            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
3046
                goto invalid_opc;
3047
            }
3048
            if (likely(rc != 31)) {
3049
                if (ra != 31)
3050
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
3051
                else
3052
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
3053
            }
3054
            break;
3055
        case 0x78:
3056
            /* FTOIS */
3057
            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
3058
                goto invalid_opc;
3059
            }
3060
            if (rc != 31) {
3061
                TCGv_i32 tmp1 = tcg_temp_new_i32();
3062
                if (ra != 31)
3063
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
3064
                else {
3065
                    TCGv tmp2 = tcg_const_i64(0);
3066
                    gen_helper_s_to_memory(tmp1, tmp2);
3067
                    tcg_temp_free(tmp2);
3068
                }
3069
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
3070
                tcg_temp_free_i32(tmp1);
3071
            }
3072
            break;
3073
        default:
3074
            goto invalid_opc;
3075
        }
3076
        break;
3077
    case 0x1D:
3078
        /* HW_MTPR (PALcode) */
3079
#ifndef CONFIG_USER_ONLY
3080
        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
3081
            return gen_mtpr(ctx, rb, insn & 0xffff);
3082
        }
3083
#endif
3084
        goto invalid_opc;
3085
    case 0x1E:
3086
        /* HW_RET (PALcode) */
3087
#ifndef CONFIG_USER_ONLY
3088
        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
3089
            if (rb == 31) {
3090
                /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
3091
                   address from EXC_ADDR.  This turns out to be useful for our
3092
                   emulation PALcode, so continue to accept it.  */
3093
                TCGv tmp = tcg_temp_new();
3094
                tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUState, exc_addr));
3095
                gen_helper_hw_ret(tmp);
3096
                tcg_temp_free(tmp);
3097
            } else {
3098
                gen_helper_hw_ret(cpu_ir[rb]);
3099
            }
3100
            ret = EXIT_PC_UPDATED;
3101
            break;
3102
        }
3103
#endif
3104
        goto invalid_opc;
3105
    case 0x1F:
3106
        /* HW_ST (PALcode) */
3107
#ifndef CONFIG_USER_ONLY
3108
        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
3109
            TCGv addr, val;
3110
            addr = tcg_temp_new();
3111
            if (rb != 31)
3112
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
3113
            else
3114
                tcg_gen_movi_i64(addr, disp12);
3115
            if (ra != 31)
3116
                val = cpu_ir[ra];
3117
            else {
3118
                val = tcg_temp_new();
3119
                tcg_gen_movi_i64(val, 0);
3120
            }
3121
            switch ((insn >> 12) & 0xF) {
3122
            case 0x0:
3123
                /* Longword physical access */
3124
                gen_helper_stl_phys(addr, val);
3125
                break;
3126
            case 0x1:
3127
                /* Quadword physical access */
3128
                gen_helper_stq_phys(addr, val);
3129
                break;
3130
            case 0x2:
3131
                /* Longword physical access with lock */
3132
                gen_helper_stl_c_phys(val, addr, val);
3133
                break;
3134
            case 0x3:
3135
                /* Quadword physical access with lock */
3136
                gen_helper_stq_c_phys(val, addr, val);
3137
                break;
3138
            case 0x4:
3139
                /* Longword virtual access */
3140
                goto invalid_opc;
3141
            case 0x5:
3142
                /* Quadword virtual access */
3143
                goto invalid_opc;
3144
            case 0x6:
3145
                /* Invalid */
3146
                goto invalid_opc;
3147
            case 0x7:
3148
                /* Invalid */
3149
                goto invalid_opc;
3150
            case 0x8:
3151
                /* Invalid */
3152
                goto invalid_opc;
3153
            case 0x9:
3154
                /* Invalid */
3155
                goto invalid_opc;
3156
            case 0xA:
3157
                /* Invalid */
3158
                goto invalid_opc;
3159
            case 0xB:
3160
                /* Invalid */
3161
                goto invalid_opc;
3162
            case 0xC:
3163
                /* Longword virtual access with alternate access mode */
3164
                goto invalid_opc;
3165
            case 0xD:
3166
                /* Quadword virtual access with alternate access mode */
3167
                goto invalid_opc;
3168
            case 0xE:
3169
                /* Invalid */
3170
                goto invalid_opc;
3171
            case 0xF:
3172
                /* Invalid */
3173
                goto invalid_opc;
3174
            }
3175
            if (ra == 31)
3176
                tcg_temp_free(val);
3177
            tcg_temp_free(addr);
3178
            break;
3179
        }
3180
#endif
3181
        goto invalid_opc;
3182
    case 0x20:
3183
        /* LDF */
3184
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
3185
        break;
3186
    case 0x21:
3187
        /* LDG */
3188
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
3189
        break;
3190
    case 0x22:
3191
        /* LDS */
3192
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
3193
        break;
3194
    case 0x23:
3195
        /* LDT */
3196
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
3197
        break;
3198
    case 0x24:
3199
        /* STF */
3200
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
3201
        break;
3202
    case 0x25:
3203
        /* STG */
3204
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
3205
        break;
3206
    case 0x26:
3207
        /* STS */
3208
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
3209
        break;
3210
    case 0x27:
3211
        /* STT */
3212
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
3213
        break;
3214
    case 0x28:
3215
        /* LDL */
3216
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
3217
        break;
3218
    case 0x29:
3219
        /* LDQ */
3220
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
3221
        break;
3222
    case 0x2A:
3223
        /* LDL_L */
3224
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
3225
        break;
3226
    case 0x2B:
3227
        /* LDQ_L */
3228
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
3229
        break;
3230
    case 0x2C:
3231
        /* STL */
3232
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
3233
        break;
3234
    case 0x2D:
3235
        /* STQ */
3236
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
3237
        break;
3238
    case 0x2E:
3239
        /* STL_C */
3240
        ret = gen_store_conditional(ctx, ra, rb, disp16, 0);
3241
        break;
3242
    case 0x2F:
3243
        /* STQ_C */
3244
        ret = gen_store_conditional(ctx, ra, rb, disp16, 1);
3245
        break;
3246
    case 0x30:
3247
        /* BR */
3248
        ret = gen_bdirect(ctx, ra, disp21);
3249
        break;
3250
    case 0x31: /* FBEQ */
3251
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
3252
        break;
3253
    case 0x32: /* FBLT */
3254
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
3255
        break;
3256
    case 0x33: /* FBLE */
3257
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
3258
        break;
3259
    case 0x34:
3260
        /* BSR */
3261
        ret = gen_bdirect(ctx, ra, disp21);
3262
        break;
3263
    case 0x35: /* FBNE */
3264
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
3265
        break;
3266
    case 0x36: /* FBGE */
3267
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
3268
        break;
3269
    case 0x37: /* FBGT */
3270
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
3271
        break;
3272
    case 0x38:
3273
        /* BLBC */
3274
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
3275
        break;
3276
    case 0x39:
3277
        /* BEQ */
3278
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
3279
        break;
3280
    case 0x3A:
3281
        /* BLT */
3282
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
3283
        break;
3284
    case 0x3B:
3285
        /* BLE */
3286
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
3287
        break;
3288
    case 0x3C:
3289
        /* BLBS */
3290
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
3291
        break;
3292
    case 0x3D:
3293
        /* BNE */
3294
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
3295
        break;
3296
    case 0x3E:
3297
        /* BGE */
3298
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
3299
        break;
3300
    case 0x3F:
3301
        /* BGT */
3302
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
3303
        break;
3304
    invalid_opc:
3305
        ret = gen_invalid(ctx);
3306
        break;
3307
    }
3308

    
3309
    return ret;
3310
}
3311

    
3312
/* Translate a block of guest Alpha code starting at tb->pc into TCG ops.
 *
 * env       - CPU state of the guest being translated
 * tb        - translation block being filled in
 * search_pc - when nonzero, also populate the gen_opc_* side tables that
 *             map each generated-op slot back to a guest PC and icount
 *             (used to restore state when an exception occurs mid-TB)
 *
 * Translation proceeds one instruction at a time via translate_one()
 * until it reports an exit condition, or until a page boundary, the
 * op-buffer limit, the icount budget, or single-stepping forces a stop.
 */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;      /* hard limit of the TCG op buffer */
    CPUBreakpoint *bp;
    int j, lj = -1;             /* lj: last gen_opc_* slot written (search_pc) */
    ExitStatus ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    ctx.tb = tb;
    ctx.env = env;
    ctx.pc = pc_start;
    ctx.mem_idx = cpu_mmu_index(env);

    /* ??? Every TB begins with unset rounding mode, to be initialized on
       the first fp insn of the TB.  Alternately we could define a proper
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
       to reset the FP_STATUS to that default at the end of any TB that
       changes the default.  We could even (gasp) dynamiclly figure out
       what default would be most efficient given the running program.  */
    ctx.tb_rm = -1;
    /* Similarly for flush-to-zero.  */
    ctx.tb_ftz = -1;

    /* Respect an explicit per-TB instruction budget; otherwise allow the
       maximum representable count.  */
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    do {
        /* Emit a debug exception in place of any instruction that has a
           breakpoint set on it.  */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record guest PC and icount for the current op slot; zero-fill
               any slots skipped since the previous instruction.  */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        /* With CF_LAST_IO the final instruction of the TB may perform I/O,
           so bracket it with gen_io_start()/gen_io_end().  */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        insn = ldl_code(ctx.pc);
        num_insns++;

        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
            tcg_gen_debug_insn_start(ctx.pc);
        }

        /* Advance past the 4-byte instruction before translating, so that
           ctx.pc is the successor PC during translation.  */
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);

        /* If we reach a page boundary, are single stepping,
           or exhaust instruction count, stop generation.  */
        if (ret == NO_EXIT
            && ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0
                || gen_opc_ptr >= gen_opc_end
                || num_insns >= max_insns
                || singlestep
                || env->singlestep_enabled)) {
            ret = EXIT_PC_STALE;
        }
    } while (ret == NO_EXIT);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

    /* Close out the TB according to how translation ended.  */
    switch (ret) {
    case EXIT_GOTO_TB:
    case EXIT_NORETURN:
        /* The exit has already been emitted.  */
        break;
    case EXIT_PC_STALE:
        /* cpu_pc was not kept up to date; store the final PC now.  */
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
        /* FALLTHRU */
    case EXIT_PC_UPDATED:
        if (env->singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(0);
        }
        break;
    default:
        abort();
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the remaining search-table slots.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
3436

    
3437
/* Translate a TB without building the PC search tables.  */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
3441

    
3442
/* Translate a TB and build the PC search tables (gen_opc_*), used to map
   a host fault back to the guest PC.  */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
3446

    
3447
/* One entry of the Alpha CPU model table: the model name selectable
   with -cpu, its implementation version (IMPLVER) and the AMASK bits
   describing which optional instruction-set extensions it supports.  */
struct cpu_def_t {
    const char *name;
    int implver, amask;
};
3451

    
3452
/* Known Alpha CPU models.  Both marketing names (ev4..ev68) and part
   numbers (21064..21264a) are accepted; matching is by exact string
   compare in cpu_alpha_init.  */
static const struct cpu_def_t cpu_defs[] = {
    { "ev4",   IMPLVER_2106x, 0 },
    { "ev5",   IMPLVER_21164, 0 },
    { "ev56",  IMPLVER_21164, AMASK_BWX },
    { "pca56", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "ev6",   IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "ev67",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "ev68",  IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                               | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), },
    { "21064", IMPLVER_2106x, 0 },
    { "21164", IMPLVER_21164, 0 },
    { "21164a", IMPLVER_21164, AMASK_BWX },
    { "21164pc", IMPLVER_21164, AMASK_BWX | AMASK_MVI },
    { "21264", IMPLVER_21264, AMASK_BWX | AMASK_FIX | AMASK_MVI | AMASK_TRAP },
    { "21264a", IMPLVER_21264, (AMASK_BWX | AMASK_FIX | AMASK_CIX
                                | AMASK_MVI | AMASK_TRAP | AMASK_PREFETCH), }
};
3470

    
3471
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
3472
{
3473
    CPUAlphaState *env;
3474
    int implver, amask, i, max;
3475

    
3476
    env = g_malloc0(sizeof(CPUAlphaState));
3477
    cpu_exec_init(env);
3478
    alpha_translate_init();
3479
    tlb_flush(env, 1);
3480

    
3481
    /* Default to ev67; no reason not to emulate insns by default.  */
3482
    implver = IMPLVER_21264;
3483
    amask = (AMASK_BWX | AMASK_FIX | AMASK_CIX | AMASK_MVI
3484
             | AMASK_TRAP | AMASK_PREFETCH);
3485

    
3486
    max = ARRAY_SIZE(cpu_defs);
3487
    for (i = 0; i < max; i++) {
3488
        if (strcmp (cpu_model, cpu_defs[i].name) == 0) {
3489
            implver = cpu_defs[i].implver;
3490
            amask = cpu_defs[i].amask;
3491
            break;
3492
        }
3493
    }
3494
    env->implver = implver;
3495
    env->amask = amask;
3496

    
3497
#if defined (CONFIG_USER_ONLY)
3498
    env->ps = PS_USER_MODE;
3499
    cpu_alpha_store_fpcr(env, (FPCR_INVD | FPCR_DZED | FPCR_OVFD
3500
                               | FPCR_UNFD | FPCR_INED | FPCR_DNOD));
3501
#endif
3502
    env->lock_addr = -1;
3503
    env->fen = 1;
3504

    
3505
    qemu_init_vcpu(env);
3506
    return env;
3507
}
3508

    
3509
/* Restore the guest PC from the per-op data recorded during a
   search_pc translation (see gen_intermediate_code_pc); called when
   unwinding from an exception taken mid-TB.  */
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    env->pc = gen_opc_pc[pc_pos];
}