/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
 */

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */


#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) do {            \
     if (logfile)                        \
       fprintf(logfile, ## __VA_ARGS__); \
   } while (0)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];

#include "gen-icount.h"

static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}

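/* Raise an exception: synchronize cpu_pc with the translation-time PC,
   then call the exception helper with the exception number and error code.  */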
static always_inline void gen_excp (DisasContext *ctx,
110
                                    int exception, int error_code)
111
{
112
    TCGv_i32 tmp1, tmp2;
113

    
114
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
115
    tmp1 = tcg_const_i32(exception);
116
    tmp2 = tcg_const_i32(error_code);
117
    gen_helper_excp(tmp1, tmp2);
118
    tcg_temp_free_i32(tmp2);
119
    tcg_temp_free_i32(tmp1);
120
}
121

    
122
static always_inline void gen_invalid (DisasContext *ctx)
123
{
124
    gen_excp(ctx, EXCP_OPCDEC, 0);
125
}
126

    
127
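/* Memory-format conversion loads: VAX F/G floats and IEEE S floats use a
   different layout in memory than in the FP registers, so the loaded value
   is passed through a conversion helper before it reaches the destination.  */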
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
128
{
129
    TCGv tmp = tcg_temp_new();
130
    TCGv_i32 tmp32 = tcg_temp_new_i32();
131
    tcg_gen_qemu_ld32u(tmp, t1, flags);
132
    tcg_gen_trunc_i64_i32(tmp32, tmp);
133
    gen_helper_memory_to_f(t0, tmp32);
134
    tcg_temp_free_i32(tmp32);
135
    tcg_temp_free(tmp);
136
}
137

    
138
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
139
{
140
    TCGv tmp = tcg_temp_new();
141
    tcg_gen_qemu_ld64(tmp, t1, flags);
142
    gen_helper_memory_to_g(t0, tmp);
143
    tcg_temp_free(tmp);
144
}
145

    
146
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
147
{
148
    TCGv tmp = tcg_temp_new();
149
    TCGv_i32 tmp32 = tcg_temp_new_i32();
150
    tcg_gen_qemu_ld32u(tmp, t1, flags);
151
    tcg_gen_trunc_i64_i32(tmp32, tmp);
152
    gen_helper_memory_to_s(t0, tmp32);
153
    tcg_temp_free_i32(tmp32);
154
    tcg_temp_free(tmp);
155
}
156

    
157
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
158
{
159
    tcg_gen_mov_i64(cpu_lock, t1);
160
    tcg_gen_qemu_ld32s(t0, t1, flags);
161
}
162

    
163
static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
164
{
165
    tcg_gen_mov_i64(cpu_lock, t1);
166
    tcg_gen_qemu_ld64(t0, t1, flags);
167
}
168

    
169
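/* Generic memory load.  ra == 31 makes the load a no-op (the result would
   be discarded), rb == 31 means a zero base register, and 'clear' masks out
   the low three address bits (used by LDQ_U).  */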
static always_inline void gen_load_mem (DisasContext *ctx,
170
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
171
                                        int ra, int rb, int32_t disp16,
172
                                        int fp, int clear)
173
{
174
    TCGv addr;
175

    
176
    if (unlikely(ra == 31))
177
        return;
178

    
179
    addr = tcg_temp_new();
180
    if (rb != 31) {
181
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
182
        if (clear)
183
            tcg_gen_andi_i64(addr, addr, ~0x7);
184
    } else {
185
        if (clear)
186
            disp16 &= ~0x7;
187
        tcg_gen_movi_i64(addr, disp16);
188
    }
189
    if (fp)
190
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
191
    else
192
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
193
    tcg_temp_free(addr);
194
}
195

    
196
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
197
{
198
    TCGv_i32 tmp32 = tcg_temp_new_i32();
199
    TCGv tmp = tcg_temp_new();
200
    gen_helper_f_to_memory(tmp32, t0);
201
    tcg_gen_extu_i32_i64(tmp, tmp32);
202
    tcg_gen_qemu_st32(tmp, t1, flags);
203
    tcg_temp_free(tmp);
204
    tcg_temp_free_i32(tmp32);
205
}
206

    
207
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
208
{
209
    TCGv tmp = tcg_temp_new();
210
    gen_helper_g_to_memory(tmp, t0);
211
    tcg_gen_qemu_st64(tmp, t1, flags);
212
    tcg_temp_free(tmp);
213
}
214

    
215
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
216
{
217
    TCGv_i32 tmp32 = tcg_temp_new_i32();
218
    TCGv tmp = tcg_temp_new();
219
    gen_helper_s_to_memory(tmp32, t0);
220
    tcg_gen_extu_i32_i64(tmp, tmp32);
221
    tcg_gen_qemu_st32(tmp, t1, flags);
222
    tcg_temp_free(tmp);
223
    tcg_temp_free_i32(tmp32);
224
}
225

    
226
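/* Store-conditional: the lock address recorded by the last load-locked is
   compared with the store address; on a match the store is performed and t0
   is set to 1, otherwise t0 is set to 0.  The lock is invalidated either way.  */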
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
227
{
228
    int l1, l2;
229

    
230
    l1 = gen_new_label();
231
    l2 = gen_new_label();
232
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
233
    tcg_gen_qemu_st32(t0, t1, flags);
234
    tcg_gen_movi_i64(t0, 1);
235
    tcg_gen_br(l2);
236
    gen_set_label(l1);
237
    tcg_gen_movi_i64(t0, 0);
238
    gen_set_label(l2);
239
    tcg_gen_movi_i64(cpu_lock, -1);
240
}
241

    
242
static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
243
{
244
    int l1, l2;
245

    
246
    l1 = gen_new_label();
247
    l2 = gen_new_label();
248
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
249
    tcg_gen_qemu_st64(t0, t1, flags);
250
    tcg_gen_movi_i64(t0, 1);
251
    tcg_gen_br(l2);
252
    gen_set_label(l1);
253
    tcg_gen_movi_i64(t0, 0);
254
    gen_set_label(l2);
255
    tcg_gen_movi_i64(cpu_lock, -1);
256
}
257

    
258
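/* Generic memory store.  As with loads, rb == 31 is a zero base and 'clear'
   aligns the address for STQ_U; ra == 31 stores zero.  'local' requests
   local temporaries so the values stay valid across the branches emitted by
   the store-conditional helpers.  */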
static always_inline void gen_store_mem (DisasContext *ctx,
259
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
260
                                         int ra, int rb, int32_t disp16,
261
                                         int fp, int clear, int local)
262
{
263
    TCGv addr;
264
    if (local)
265
        addr = tcg_temp_local_new();
266
    else
267
        addr = tcg_temp_new();
268
    if (rb != 31) {
269
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
270
        if (clear)
271
            tcg_gen_andi_i64(addr, addr, ~0x7);
272
    } else {
273
        if (clear)
274
            disp16 &= ~0x7;
275
        tcg_gen_movi_i64(addr, disp16);
276
    }
277
    if (ra != 31) {
278
        if (fp)
279
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
280
        else
281
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
282
    } else {
283
        TCGv zero;
284
        if (local)
285
            zero = tcg_const_local_i64(0);
286
        else
287
            zero = tcg_const_i64(0);
288
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
289
        tcg_temp_free(zero);
290
    }
291
    tcg_temp_free(addr);
292
}
293

    
294
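/* Conditional branch on an integer register.  With 'mask' set only bit 0 of
   ra is tested (BLBC/BLBS); the target is PC-relative, with the displacement
   scaled by 4.  */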
static always_inline void gen_bcond (DisasContext *ctx,
295
                                     TCGCond cond,
296
                                     int ra, int32_t disp16, int mask)
297
{
298
    int l1, l2;
299

    
300
    l1 = gen_new_label();
301
    l2 = gen_new_label();
302
    if (likely(ra != 31)) {
303
        if (mask) {
304
            TCGv tmp = tcg_temp_new();
305
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
306
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
307
            tcg_temp_free(tmp);
308
        } else
309
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
310
    } else {
311
        /* Very uncommon case - Do not bother to optimize.  */
312
        TCGv tmp = tcg_const_i64(0);
313
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
314
        tcg_temp_free(tmp);
315
    }
316
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
317
    tcg_gen_br(l2);
318
    gen_set_label(l1);
319
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
320
    gen_set_label(l2);
321
}
322

    
323
static always_inline void gen_fbcond (DisasContext *ctx, int opc,
324
                                      int ra, int32_t disp16)
325
{
326
    int l1, l2;
327
    TCGv tmp;
328
    TCGv src;
329

    
330
    l1 = gen_new_label();
331
    l2 = gen_new_label();
332
    if (ra != 31) {
333
        tmp = tcg_temp_new();
334
        src = cpu_fir[ra];
335
    } else  {
336
        tmp = tcg_const_i64(0);
337
        src = tmp;
338
    }
339
    switch (opc) {
340
    case 0x31: /* FBEQ */
341
        gen_helper_cmpfeq(tmp, src);
342
        break;
343
    case 0x32: /* FBLT */
344
        gen_helper_cmpflt(tmp, src);
345
        break;
346
    case 0x33: /* FBLE */
347
        gen_helper_cmpfle(tmp, src);
348
        break;
349
    case 0x35: /* FBNE */
350
        gen_helper_cmpfne(tmp, src);
351
        break;
352
    case 0x36: /* FBGE */
353
        gen_helper_cmpfge(tmp, src);
354
        break;
355
    case 0x37: /* FBGT */
356
        gen_helper_cmpfgt(tmp, src);
357
        break;
358
    default:
359
        abort();
360
    }
361
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
362
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
363
    tcg_gen_br(l2);
364
    gen_set_label(l1);
365
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
366
    gen_set_label(l2);
367
}
368

    
369
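/* Conditional move: the caller passes the *inverted* condition, so the move
   of rb (or the literal) into rc is skipped when inv_cond holds on ra, or on
   its low bit if 'mask' is set.  */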
static always_inline void gen_cmov (TCGCond inv_cond,
370
                                    int ra, int rb, int rc,
371
                                    int islit, uint8_t lit, int mask)
372
{
373
    int l1;
374

    
375
    if (unlikely(rc == 31))
376
        return;
377

    
378
    l1 = gen_new_label();
379

    
380
    if (ra != 31) {
381
        if (mask) {
382
            TCGv tmp = tcg_temp_new();
383
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
384
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
385
            tcg_temp_free(tmp);
386
        } else
387
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
388
    } else {
389
        /* Very uncommon case - Do not bother to optimize.  */
390
        TCGv tmp = tcg_const_i64(0);
391
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
392
        tcg_temp_free(tmp);
393
    }
394

    
395
    if (islit)
396
        tcg_gen_movi_i64(cpu_ir[rc], lit);
397
    else
398
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
399
    gen_set_label(l1);
400
}
401

    
402
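/* Two-operand floating-point helpers (square roots and conversions):
   a source register of 31 reads as zero.  */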
#define FARITH2(name)                                       \
403
static always_inline void glue(gen_f, name)(int rb, int rc) \
404
{                                                           \
405
    if (unlikely(rc == 31))                                 \
406
      return;                                               \
407
                                                            \
408
    if (rb != 31)                                           \
409
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);    \
410
    else {                                                  \
411
        TCGv tmp = tcg_const_i64(0);                        \
412
        gen_helper_ ## name (cpu_fir[rc], tmp);            \
413
        tcg_temp_free(tmp);                                 \
414
    }                                                       \
415
}
416
FARITH2(sqrts)
417
FARITH2(sqrtf)
418
FARITH2(sqrtg)
419
FARITH2(sqrtt)
420
FARITH2(cvtgf)
421
FARITH2(cvtgq)
422
FARITH2(cvtqf)
423
FARITH2(cvtqg)
424
FARITH2(cvtst)
425
FARITH2(cvtts)
426
FARITH2(cvttq)
427
FARITH2(cvtqs)
428
FARITH2(cvtqt)
429
FARITH2(cvtlq)
430
FARITH2(cvtql)
431
FARITH2(cvtqlv)
432
FARITH2(cvtqlsv)
433

    
434
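/* Three-operand floating-point helpers: either source register may be 31,
   which again reads as zero.  */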
#define FARITH3(name)                                                     \
435
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)      \
436
{                                                                         \
437
    if (unlikely(rc == 31))                                               \
438
        return;                                                           \
439
                                                                          \
440
    if (ra != 31) {                                                       \
441
        if (rb != 31)                                                     \
442
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
443
        else {                                                            \
444
            TCGv tmp = tcg_const_i64(0);                                  \
445
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
446
            tcg_temp_free(tmp);                                           \
447
        }                                                                 \
448
    } else {                                                              \
449
        TCGv tmp = tcg_const_i64(0);                                      \
450
        if (rb != 31)                                                     \
451
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
452
        else                                                              \
453
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                   \
454
        tcg_temp_free(tmp);                                               \
455
    }                                                                     \
456
}
457

    
458
FARITH3(addf)
459
FARITH3(subf)
460
FARITH3(mulf)
461
FARITH3(divf)
462
FARITH3(addg)
463
FARITH3(subg)
464
FARITH3(mulg)
465
FARITH3(divg)
466
FARITH3(cmpgeq)
467
FARITH3(cmpglt)
468
FARITH3(cmpgle)
469
FARITH3(adds)
470
FARITH3(subs)
471
FARITH3(muls)
472
FARITH3(divs)
473
FARITH3(addt)
474
FARITH3(subt)
475
FARITH3(mult)
476
FARITH3(divt)
477
FARITH3(cmptun)
478
FARITH3(cmpteq)
479
FARITH3(cmptlt)
480
FARITH3(cmptle)
481
FARITH3(cpys)
482
FARITH3(cpysn)
483
FARITH3(cpyse)
484

    
485
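/* Floating-point conditional moves: the cmpf helper evaluates the condition
   on fa and, when it holds, fb (or zero for register 31) is copied to fc.  */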
#define FCMOV(name)                                                   \
486
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)  \
487
{                                                                     \
488
    int l1;                                                           \
489
    TCGv tmp;                                                         \
490
                                                                      \
491
    if (unlikely(rc == 31))                                           \
492
        return;                                                       \
493
                                                                      \
494
    l1 = gen_new_label();                                             \
496
    if (ra != 31) {                                                   \
497
        tmp = tcg_temp_new();                             \
498
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
499
    } else  {                                                         \
500
        tmp = tcg_const_i64(0);                                       \
501
        gen_helper_ ## name (tmp, tmp);                               \
502
    }                                                                 \
503
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
504
    if (rb != 31)                                                     \
505
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                    \
506
    else                                                              \
507
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
508
    gen_set_label(l1);                                                \
509
}
510
FCMOV(cmpfeq)
511
FCMOV(cmpfne)
512
FCMOV(cmpflt)
513
FCMOV(cmpfge)
514
FCMOV(cmpfle)
515
FCMOV(cmpfgt)
516

    
517
/* EXTWH, EXTLH, EXTQH */
518
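/* Extract the high part of the addressed bytes: shift ra left by
   64 - 8 * (shift amount & 7) bits, then zero-extend to the operand width.
   A NULL extension function means no truncation, i.e. EXTQH.  */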
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
519
                                    int ra, int rb, int rc,
520
                                    int islit, uint8_t lit)
521
{
522
    if (unlikely(rc == 31))
523
        return;
524

    
525
    if (ra != 31) {
526
        if (islit) {
527
            if (lit != 0)
528
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
529
            else
530
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
531
        } else {
532
            TCGv tmp1, tmp2;
533
            tmp1 = tcg_temp_new();
534
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
535
            tcg_gen_shli_i64(tmp1, tmp1, 3);
536
            tmp2 = tcg_const_i64(64);
537
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
538
            tcg_temp_free(tmp2);
539
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
540
            tcg_temp_free(tmp1);
541
        }
542
        if (tcg_gen_ext_i64)
543
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
544
    } else
545
        tcg_gen_movi_i64(cpu_ir[rc], 0);
546
}
547

    
548
/* EXTBL, EXTWL, EXTLL, EXTQL */
549
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
550
                                    int ra, int rb, int rc,
551
                                    int islit, uint8_t lit)
552
{
553
    if (unlikely(rc == 31))
554
        return;
555

    
556
    if (ra != 31) {
557
        if (islit) {
558
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
559
        } else {
560
            TCGv tmp = tcg_temp_new();
561
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
562
            tcg_gen_shli_i64(tmp, tmp, 3);
563
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
564
            tcg_temp_free(tmp);
565
        }
566
        if (tcg_gen_ext_i64)
567
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
568
    } else
569
        tcg_gen_movi_i64(cpu_ir[rc], 0);
570
}
571

    
572
/* Code to call arith3 helpers */
573
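/* The helper receives both source operands; register 31 reads as zero and a
   literal operand is materialized into a constant temporary.  */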
#define ARITH3(name)                                                  \
574
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
575
                                            int islit, uint8_t lit)   \
576
{                                                                     \
577
    if (unlikely(rc == 31))                                           \
578
        return;                                                       \
579
                                                                      \
580
    if (ra != 31) {                                                   \
581
        if (islit) {                                                  \
582
            TCGv tmp = tcg_const_i64(lit);                            \
583
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
584
            tcg_temp_free(tmp);                                       \
585
        } else                                                        \
586
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
587
    } else {                                                          \
588
        TCGv tmp1 = tcg_const_i64(0);                                 \
589
        if (islit) {                                                  \
590
            TCGv tmp2 = tcg_const_i64(lit);                           \
591
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
592
            tcg_temp_free(tmp2);                                      \
593
        } else                                                        \
594
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
595
        tcg_temp_free(tmp1);                                          \
596
    }                                                                 \
597
}
598
ARITH3(cmpbge)
599
ARITH3(addlv)
600
ARITH3(sublv)
601
ARITH3(addqv)
602
ARITH3(subqv)
603
ARITH3(mskbl)
604
ARITH3(insbl)
605
ARITH3(mskwl)
606
ARITH3(inswl)
607
ARITH3(mskll)
608
ARITH3(insll)
609
ARITH3(zap)
610
ARITH3(zapnot)
611
ARITH3(mskql)
612
ARITH3(insql)
613
ARITH3(mskwh)
614
ARITH3(inswh)
615
ARITH3(msklh)
616
ARITH3(inslh)
617
ARITH3(mskqh)
618
ARITH3(insqh)
619
ARITH3(umulh)
620
ARITH3(mullv)
621
ARITH3(mulqv)
622

    
623
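/* Integer compare: write 1 to rc if 'cond' holds between ra (or zero) and
   the second operand (register rb or the literal), 0 otherwise.  */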
static always_inline void gen_cmp(TCGCond cond,
624
                                  int ra, int rb, int rc,
625
                                  int islit, uint8_t lit)
626
{
627
    int l1, l2;
628
    TCGv tmp;
629

    
630
    if (unlikely(rc == 31))
631
    return;
632

    
633
    l1 = gen_new_label();
634
    l2 = gen_new_label();
635

    
636
    if (ra != 31) {
637
        tmp = tcg_temp_new();
638
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
639
    } else
640
        tmp = tcg_const_i64(0);
641
    if (islit)
642
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
643
    else
644
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
645

    
646
    tcg_gen_movi_i64(cpu_ir[rc], 0);
647
    tcg_gen_br(l2);
648
    gen_set_label(l1);
649
    tcg_gen_movi_i64(cpu_ir[rc], 1);
650
    gen_set_label(l2);
651
}
652

    
653
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
654
{
655
    uint32_t palcode;
656
    int32_t disp21, disp16, disp12;
657
    uint16_t fn11, fn16;
658
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
659
    uint8_t lit;
660
    int ret;
661

    
662
    /* Decode all instruction fields */
663
    opc = insn >> 26;
664
    ra = (insn >> 21) & 0x1F;
665
    rb = (insn >> 16) & 0x1F;
666
    rc = insn & 0x1F;
667
    sbz = (insn >> 13) & 0x07;
668
    islit = (insn >> 12) & 1;
669
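    /* An operate instruction with rb == 31 and no literal bit behaves as if
       it had the literal value 0, since register 31 always reads as zero.  */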
    if (rb == 31 && !islit) {
670
        islit = 1;
671
        lit = 0;
672
    } else
673
        lit = (insn >> 13) & 0xFF;
674
    palcode = insn & 0x03FFFFFF;
675
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
676
    disp16 = (int16_t)(insn & 0x0000FFFF);
677
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
678
    fn16 = insn & 0x0000FFFF;
679
    fn11 = (insn >> 5) & 0x000007FF;
680
    fpfn = fn11 & 0x3F;
681
    fn7 = (insn >> 5) & 0x0000007F;
682
    fn2 = (insn >> 5) & 0x00000003;
683
    ret = 0;
684
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
685
              opc, ra, rb, rc, disp16);
686
    switch (opc) {
687
    case 0x00:
688
        /* CALL_PAL */
689
        if (palcode >= 0x80 && palcode < 0xC0) {
690
            /* Unprivileged PAL call */
691
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
692
#if !defined (CONFIG_USER_ONLY)
693
        } else if (palcode < 0x40) {
694
            /* Privileged PAL code */
695
            if (ctx->mem_idx & 1)
696
                goto invalid_opc;
697
            else
698
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
699
#endif
700
        } else {
701
            /* Invalid PAL call */
702
            goto invalid_opc;
703
        }
704
        ret = 3;
705
        break;
706
    case 0x01:
707
        /* OPC01 */
708
        goto invalid_opc;
709
    case 0x02:
710
        /* OPC02 */
711
        goto invalid_opc;
712
    case 0x03:
713
        /* OPC03 */
714
        goto invalid_opc;
715
    case 0x04:
716
        /* OPC04 */
717
        goto invalid_opc;
718
    case 0x05:
719
        /* OPC05 */
720
        goto invalid_opc;
721
    case 0x06:
722
        /* OPC06 */
723
        goto invalid_opc;
724
    case 0x07:
725
        /* OPC07 */
726
        goto invalid_opc;
727
    case 0x08:
728
        /* LDA */
729
        if (likely(ra != 31)) {
730
            if (rb != 31)
731
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
732
            else
733
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
734
        }
735
        break;
736
    case 0x09:
737
        /* LDAH */
738
        if (likely(ra != 31)) {
739
            if (rb != 31)
740
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
741
            else
742
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
743
        }
744
        break;
745
    case 0x0A:
746
        /* LDBU */
747
        if (!(ctx->amask & AMASK_BWX))
748
            goto invalid_opc;
749
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
750
        break;
751
    case 0x0B:
752
        /* LDQ_U */
753
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
754
        break;
755
    case 0x0C:
756
        /* LDWU */
757
        if (!(ctx->amask & AMASK_BWX))
758
            goto invalid_opc;
759
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
760
        break;
761
    case 0x0D:
762
        /* STW */
763
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
764
        break;
765
    case 0x0E:
766
        /* STB */
767
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
768
        break;
769
    case 0x0F:
770
        /* STQ_U */
771
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
772
        break;
773
    case 0x10:
774
        switch (fn7) {
775
        case 0x00:
776
            /* ADDL */
777
            if (likely(rc != 31)) {
778
                if (ra != 31) {
779
                    if (islit) {
780
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
781
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
782
                    } else {
783
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
784
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
785
                    }
786
                } else {
787
                    if (islit)
788
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
789
                    else
790
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
791
                }
792
            }
793
            break;
794
        case 0x02:
795
            /* S4ADDL */
796
            if (likely(rc != 31)) {
797
                if (ra != 31) {
798
                    TCGv tmp = tcg_temp_new();
799
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
800
                    if (islit)
801
                        tcg_gen_addi_i64(tmp, tmp, lit);
802
                    else
803
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
804
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
805
                    tcg_temp_free(tmp);
806
                } else {
807
                    if (islit)
808
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
809
                    else
810
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
811
                }
812
            }
813
            break;
814
        case 0x09:
815
            /* SUBL */
816
            if (likely(rc != 31)) {
817
                if (ra != 31) {
818
                    if (islit)
819
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
820
                    else
821
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
822
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
823
                } else {
824
                    if (islit)
825
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
826
                    else {
827
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
828
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
829
                    }
                }
830
            }
831
            break;
832
        case 0x0B:
833
            /* S4SUBL */
834
            if (likely(rc != 31)) {
835
                if (ra != 31) {
836
                    TCGv tmp = tcg_temp_new();
837
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
838
                    if (islit)
839
                        tcg_gen_subi_i64(tmp, tmp, lit);
840
                    else
841
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
842
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
843
                    tcg_temp_free(tmp);
844
                } else {
845
                    if (islit)
846
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
847
                    else {
848
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
849
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
850
                    }
851
                }
852
            }
853
            break;
854
        case 0x0F:
855
            /* CMPBGE */
856
            gen_cmpbge(ra, rb, rc, islit, lit);
857
            break;
858
        case 0x12:
859
            /* S8ADDL */
860
            if (likely(rc != 31)) {
861
                if (ra != 31) {
862
                    TCGv tmp = tcg_temp_new();
863
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
864
                    if (islit)
865
                        tcg_gen_addi_i64(tmp, tmp, lit);
866
                    else
867
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
868
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
869
                    tcg_temp_free(tmp);
870
                } else {
871
                    if (islit)
872
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
873
                    else
874
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
875
                }
876
            }
877
            break;
878
        case 0x1B:
879
            /* S8SUBL */
880
            if (likely(rc != 31)) {
881
                if (ra != 31) {
882
                    TCGv tmp = tcg_temp_new();
883
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
884
                    if (islit)
885
                        tcg_gen_subi_i64(tmp, tmp, lit);
886
                    else
887
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
888
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
889
                    tcg_temp_free(tmp);
890
                } else {
891
                    if (islit)
892
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
893
                    else {
894
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
895
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
896
                    }
897
                }
898
            }
899
            break;
900
        case 0x1D:
901
            /* CMPULT */
902
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
903
            break;
904
        case 0x20:
905
            /* ADDQ */
906
            if (likely(rc != 31)) {
907
                if (ra != 31) {
908
                    if (islit)
909
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
910
                    else
911
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
912
                } else {
913
                    if (islit)
914
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
915
                    else
916
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
917
                }
918
            }
919
            break;
920
        case 0x22:
921
            /* S4ADDQ */
922
            if (likely(rc != 31)) {
923
                if (ra != 31) {
924
                    TCGv tmp = tcg_temp_new();
925
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
926
                    if (islit)
927
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
928
                    else
929
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
930
                    tcg_temp_free(tmp);
931
                } else {
932
                    if (islit)
933
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
934
                    else
935
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
936
                }
937
            }
938
            break;
939
        case 0x29:
940
            /* SUBQ */
941
            if (likely(rc != 31)) {
942
                if (ra != 31) {
943
                    if (islit)
944
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
945
                    else
946
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
947
                } else {
948
                    if (islit)
949
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
950
                    else
951
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
952
                }
953
            }
954
            break;
955
        case 0x2B:
956
            /* S4SUBQ */
957
            if (likely(rc != 31)) {
958
                if (ra != 31) {
959
                    TCGv tmp = tcg_temp_new();
960
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
961
                    if (islit)
962
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
963
                    else
964
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
965
                    tcg_temp_free(tmp);
966
                } else {
967
                    if (islit)
968
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
969
                    else
970
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
971
                }
972
            }
973
            break;
974
        case 0x2D:
975
            /* CMPEQ */
976
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
977
            break;
978
        case 0x32:
979
            /* S8ADDQ */
980
            if (likely(rc != 31)) {
981
                if (ra != 31) {
982
                    TCGv tmp = tcg_temp_new();
983
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
984
                    if (islit)
985
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
986
                    else
987
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
988
                    tcg_temp_free(tmp);
989
                } else {
990
                    if (islit)
991
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
992
                    else
993
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
994
                }
995
            }
996
            break;
997
        case 0x3B:
998
            /* S8SUBQ */
999
            if (likely(rc != 31)) {
1000
                if (ra != 31) {
1001
                    TCGv tmp = tcg_temp_new();
1002
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
1003
                    if (islit)
1004
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
1005
                    else
1006
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
1007
                    tcg_temp_free(tmp);
1008
                } else {
1009
                    if (islit)
1010
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
1011
                    else
1012
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
1013
                }
1014
            }
1015
            break;
1016
        case 0x3D:
1017
            /* CMPULE */
1018
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1019
            break;
1020
        case 0x40:
1021
            /* ADDL/V */
1022
            gen_addlv(ra, rb, rc, islit, lit);
1023
            break;
1024
        case 0x49:
1025
            /* SUBL/V */
1026
            gen_sublv(ra, rb, rc, islit, lit);
1027
            break;
1028
        case 0x4D:
1029
            /* CMPLT */
1030
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1031
            break;
1032
        case 0x60:
1033
            /* ADDQ/V */
1034
            gen_addqv(ra, rb, rc, islit, lit);
1035
            break;
1036
        case 0x69:
1037
            /* SUBQ/V */
1038
            gen_subqv(ra, rb, rc, islit, lit);
1039
            break;
1040
        case 0x6D:
1041
            /* CMPLE */
1042
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1043
            break;
1044
        default:
1045
            goto invalid_opc;
1046
        }
1047
        break;
1048
    case 0x11:
1049
        switch (fn7) {
1050
        case 0x00:
1051
            /* AND */
1052
            if (likely(rc != 31)) {
1053
                if (ra == 31)
1054
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1055
                else if (islit)
1056
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1057
                else
1058
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1059
            }
1060
            break;
1061
        case 0x08:
1062
            /* BIC */
1063
            if (likely(rc != 31)) {
1064
                if (ra != 31) {
1065
                    if (islit)
1066
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1067
                    else
1068
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1069
                } else
1070
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1071
            }
1072
            break;
1073
        case 0x14:
1074
            /* CMOVLBS */
1075
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1076
            break;
1077
        case 0x16:
1078
            /* CMOVLBC */
1079
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1080
            break;
1081
        case 0x20:
1082
            /* BIS */
1083
            if (likely(rc != 31)) {
1084
                if (ra != 31) {
1085
                    if (islit)
1086
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1087
                    else
1088
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1089
                } else {
1090
                    if (islit)
1091
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1092
                    else
1093
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1094
                }
1095
            }
1096
            break;
1097
        case 0x24:
1098
            /* CMOVEQ */
1099
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1100
            break;
1101
        case 0x26:
1102
            /* CMOVNE */
1103
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1104
            break;
1105
        case 0x28:
1106
            /* ORNOT */
1107
            if (likely(rc != 31)) {
1108
                if (ra != 31) {
1109
                    if (islit)
1110
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1111
                    else
1112
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1113
                } else {
1114
                    if (islit)
1115
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1116
                    else
1117
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1118
                }
1119
            }
1120
            break;
1121
        case 0x40:
1122
            /* XOR */
1123
            if (likely(rc != 31)) {
1124
                if (ra != 31) {
1125
                    if (islit)
1126
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1127
                    else
1128
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1129
                } else {
1130
                    if (islit)
1131
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
1132
                    else
1133
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1134
                }
1135
            }
1136
            break;
1137
        case 0x44:
1138
            /* CMOVLT */
1139
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1140
            break;
1141
        case 0x46:
1142
            /* CMOVGE */
1143
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1144
            break;
1145
        case 0x48:
1146
            /* EQV */
1147
            if (likely(rc != 31)) {
1148
                if (ra != 31) {
1149
                    if (islit)
1150
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1151
                    else
1152
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1153
                } else {
1154
                    if (islit)
1155
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1156
                    else
1157
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1158
                }
1159
            }
1160
            break;
1161
        case 0x61:
1162
            /* AMASK */
1163
            if (likely(rc != 31)) {
1164
                if (islit)
1165
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1166
                else
1167
                    gen_helper_amask(cpu_ir[rc], cpu_ir[rb]);
1168
            }
1169
            break;
1170
        case 0x64:
1171
            /* CMOVLE */
1172
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1173
            break;
1174
        case 0x66:
1175
            /* CMOVGT */
1176
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1177
            break;
1178
        case 0x6C:
1179
            /* IMPLVER */
1180
            if (rc != 31)
1181
                gen_helper_load_implver(cpu_ir[rc]);
1182
            break;
1183
        default:
1184
            goto invalid_opc;
1185
        }
1186
        break;
1187
    case 0x12:
1188
        switch (fn7) {
1189
        case 0x02:
1190
            /* MSKBL */
1191
            gen_mskbl(ra, rb, rc, islit, lit);
1192
            break;
1193
        case 0x06:
1194
            /* EXTBL */
1195
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1196
            break;
1197
        case 0x0B:
1198
            /* INSBL */
1199
            gen_insbl(ra, rb, rc, islit, lit);
1200
            break;
1201
        case 0x12:
1202
            /* MSKWL */
1203
            gen_mskwl(ra, rb, rc, islit, lit);
1204
            break;
1205
        case 0x16:
1206
            /* EXTWL */
1207
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1208
            break;
1209
        case 0x1B:
1210
            /* INSWL */
1211
            gen_inswl(ra, rb, rc, islit, lit);
1212
            break;
1213
        case 0x22:
1214
            /* MSKLL */
1215
            gen_mskll(ra, rb, rc, islit, lit);
1216
            break;
1217
        case 0x26:
1218
            /* EXTLL */
1219
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1220
            break;
1221
        case 0x2B:
1222
            /* INSLL */
1223
            gen_insll(ra, rb, rc, islit, lit);
1224
            break;
1225
        case 0x30:
1226
            /* ZAP */
1227
            gen_zap(ra, rb, rc, islit, lit);
1228
            break;
1229
        case 0x31:
1230
            /* ZAPNOT */
1231
            gen_zapnot(ra, rb, rc, islit, lit);
1232
            break;
1233
        case 0x32:
1234
            /* MSKQL */
1235
            gen_mskql(ra, rb, rc, islit, lit);
1236
            break;
1237
        case 0x34:
1238
            /* SRL */
1239
            if (likely(rc != 31)) {
1240
                if (ra != 31) {
1241
                    if (islit)
1242
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1243
                    else {
1244
                        TCGv shift = tcg_temp_new();
1245
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1246
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1247
                        tcg_temp_free(shift);
1248
                    }
1249
                } else
1250
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1251
            }
1252
            break;
1253
        case 0x36:
1254
            /* EXTQL */
1255
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
1256
            break;
1257
        case 0x39:
1258
            /* SLL */
1259
            if (likely(rc != 31)) {
1260
                if (ra != 31) {
1261
                    if (islit)
1262
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1263
                    else {
1264
                        TCGv shift = tcg_temp_new();
1265
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1266
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1267
                        tcg_temp_free(shift);
1268
                    }
1269
                } else
1270
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1271
            }
1272
            break;
1273
        case 0x3B:
1274
            /* INSQL */
1275
            gen_insql(ra, rb, rc, islit, lit);
1276
            break;
1277
        case 0x3C:
1278
            /* SRA */
1279
            if (likely(rc != 31)) {
1280
                if (ra != 31) {
1281
                    if (islit)
1282
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1283
                    else {
1284
                        TCGv shift = tcg_temp_new();
1285
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1286
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1287
                        tcg_temp_free(shift);
1288
                    }
1289
                } else
1290
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1291
            }
1292
            break;
1293
        case 0x52:
1294
            /* MSKWH */
1295
            gen_mskwh(ra, rb, rc, islit, lit);
1296
            break;
1297
        case 0x57:
1298
            /* INSWH */
1299
            gen_inswh(ra, rb, rc, islit, lit);
1300
            break;
1301
        case 0x5A:
1302
            /* EXTWH */
1303
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1304
            break;
1305
        case 0x62:
1306
            /* MSKLH */
1307
            gen_msklh(ra, rb, rc, islit, lit);
1308
            break;
1309
        case 0x67:
1310
            /* INSLH */
1311
            gen_inslh(ra, rb, rc, islit, lit);
1312
            break;
1313
        case 0x6A:
1314
            /* EXTLH */
1315
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1316
            break;
1317
        case 0x72:
1318
            /* MSKQH */
1319
            gen_mskqh(ra, rb, rc, islit, lit);
1320
            break;
1321
        case 0x77:
1322
            /* INSQH */
1323
            gen_insqh(ra, rb, rc, islit, lit);
1324
            break;
1325
        case 0x7A:
1326
            /* EXTQH */
1327
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
1328
            break;
1329
        default:
1330
            goto invalid_opc;
1331
        }
1332
        break;
1333
    case 0x13:
1334
        switch (fn7) {
1335
        case 0x00:
1336
            /* MULL */
1337
            if (likely(rc != 31)) {
1338
                if (ra == 31)
1339
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1340
                else {
1341
                    if (islit)
1342
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1343
                    else
1344
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1345
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1346
                }
1347
            }
1348
            break;
1349
        case 0x20:
1350
            /* MULQ */
1351
            if (likely(rc != 31)) {
1352
                if (ra == 31)
1353
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
1354
                else if (islit)
1355
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1356
                else
1357
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1358
            }
1359
            break;
1360
        case 0x30:
1361
            /* UMULH */
1362
            gen_umulh(ra, rb, rc, islit, lit);
1363
            break;
1364
        case 0x40:
1365
            /* MULL/V */
1366
            gen_mullv(ra, rb, rc, islit, lit);
1367
            break;
1368
        case 0x60:
1369
            /* MULQ/V */
1370
            gen_mulqv(ra, rb, rc, islit, lit);
1371
            break;
1372
        default:
1373
            goto invalid_opc;
1374
        }
1375
        break;
1376
    case 0x14:
1377
        switch (fpfn) { /* f11 & 0x3F */
1378
        case 0x04:
1379
            /* ITOFS */
1380
            if (!(ctx->amask & AMASK_FIX))
1381
                goto invalid_opc;
1382
            if (likely(rc != 31)) {
1383
                if (ra != 31) {
1384
                    TCGv_i32 tmp = tcg_temp_new_i32();
1385
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1386
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
1387
                    tcg_temp_free_i32(tmp);
1388
                } else
1389
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1390
            }
1391
            break;
1392
        case 0x0A:
1393
            /* SQRTF */
1394
            if (!(ctx->amask & AMASK_FIX))
1395
                goto invalid_opc;
1396
            gen_fsqrtf(rb, rc);
1397
            break;
1398
        case 0x0B:
1399
            /* SQRTS */
1400
            if (!(ctx->amask & AMASK_FIX))
1401
                goto invalid_opc;
1402
            gen_fsqrts(rb, rc);
1403
            break;
1404
        case 0x14:
1405
            /* ITOFF */
1406
            if (!(ctx->amask & AMASK_FIX))
1407
                goto invalid_opc;
1408
            if (likely(rc != 31)) {
1409
                if (ra != 31) {
1410
                    TCGv_i32 tmp = tcg_temp_new_i32();
1411
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1412
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
1413
                    tcg_temp_free_i32(tmp);
1414
                } else
1415
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1416
            }
1417
            break;
1418
        case 0x24:
1419
            /* ITOFT */
1420
            if (!(ctx->amask & AMASK_FIX))
1421
                goto invalid_opc;
1422
            if (likely(rc != 31)) {
1423
                if (ra != 31)
1424
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1425
                else
1426
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
1427
            }
1428
            break;
1429
        case 0x2A:
1430
            /* SQRTG */
1431
            if (!(ctx->amask & AMASK_FIX))
1432
                goto invalid_opc;
1433
            gen_fsqrtg(rb, rc);
1434
            break;
1435
        case 0x02B:
1436
            /* SQRTT */
1437
            if (!(ctx->amask & AMASK_FIX))
1438
                goto invalid_opc;
1439
            gen_fsqrtt(rb, rc);
1440
            break;
1441
        default:
1442
            goto invalid_opc;
1443
        }
1444
        break;
1445
    case 0x15:
1446
        /* VAX floating point */
1447
        /* XXX: rounding mode and trap are ignored (!) */
1448
        switch (fpfn) { /* f11 & 0x3F */
1449
        case 0x00:
1450
            /* ADDF */
1451
            gen_faddf(ra, rb, rc);
1452
            break;
1453
        case 0x01:
1454
            /* SUBF */
1455
            gen_fsubf(ra, rb, rc);
1456
            break;
1457
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
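    /* Opcode 0x18 covers the miscellaneous instructions (TRAPB, EXCB,
     * memory barriers, prefetch hints, RPCC, RC/RS, WH64), selected by the
     * 16-bit function field held in disp16.  Most of them generate no code
     * here beyond, at most, ending the current TB.
     */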
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
#if 0
            ret = 2;
#else
            goto invalid_opc;
#endif
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
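    /* HW_LD is the PALcode privileged load; bits <15:12> of the instruction
     * select the access variant (physical or virtual, with or without lock,
     * PTE fetch, protection check, alternate access mode).
     */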
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch */
                gen_helper_ldl_kernel(cpu_ir[ra], addr);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch */
                gen_helper_ldq_kernel(cpu_ir[ra], addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xB:
                /* Quadword virtual access with protection check */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
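    /* Opcode 0x1C groups the architecture extension instructions (BWX, CIX,
     * MVI, FIX); each one first checks the corresponding ctx->amask bit.
     */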
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
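    /* HW_ST is the PALcode privileged store; as with HW_LD, bits <15:12>
     * select the physical/virtual and alternate access mode variants.
     */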
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra != 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        ret = 2;
        break;
#endif
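    /* Memory format loads and stores.  The argument following disp16 is 1
     * for the floating-point forms, and the trailing gen_store_mem argument
     * is 1 only for the store-conditional forms (STL_C/STQ_C).
     */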
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}

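/* Translate a block of instructions for one TB: translate_one() is called
 * until it reports a branch (ret == 1), a forced TB exit (ret == 2) or an
 * invalid opcode (ret == 3), or until a page boundary or the max_insns
 * limit is reached.  Unless a branch or the invalid-opcode path already set
 * cpu_pc, it is pointed at the fall-through address before the TB ends.
 */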
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = ctx.pc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary, fill the opcode buffer or translate
         * too many instructions, stop generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            gen_opc_ptr >= gen_opc_end ||
            num_insns >= max_insns) {
            break;
        }

        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

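/* Create and minimally initialize a CPUAlphaState.  The cpu_model argument
 * is currently ignored: the IMPLVER is hardcoded to 2106x and most IPRs are
 * simply cleared.
 */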
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}