Statistics
| Branch: | Revision:

root / target-sparc / translate.c @ 2dedf314

History | View | Annotate | Download (189.9 kB)

1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "disas.h"
29
#include "helper.h"
30
#include "tcg-op.h"
31

    
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
#define DEBUG_DISAS
36

    
37
#define DYNAMIC_PC  1 /* dynamic pc value */
38
#define JUMP_PC     2 /* dynamic pc value which takes only two values
39
                         according to jump_pc[T2] */
40

    
41
/* global register indexes */
42
static TCGv_ptr cpu_env, cpu_regwptr;
43
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44
static TCGv_i32 cpu_cc_op;
45
static TCGv_i32 cpu_psr;
46
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47
static TCGv cpu_y;
48
#ifndef CONFIG_USER_ONLY
49
static TCGv cpu_tbr;
50
#endif
51
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52
#ifdef TARGET_SPARC64
53
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54
static TCGv cpu_gsr;
55
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57
static TCGv_i32 cpu_softint;
58
#else
59
static TCGv cpu_wim;
60
#endif
61
/* local register indexes (only used inside old micro ops) */
62
static TCGv cpu_tmp0;
63
static TCGv_i32 cpu_tmp32;
64
static TCGv_i64 cpu_tmp64;
65
/* Floating point registers */
66
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67

    
68
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69
static target_ulong gen_opc_jump_pc[2];
70

    
71
#include "gen-icount.h"
72

    
73
typedef struct DisasContext {
74
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77
    int is_br;
78
    int mem_idx;
79
    int fpu_enabled;
80
    int address_mask_32bit;
81
    int singlestep;
82
    uint32_t cc_op;  /* current CC operation */
83
    struct TranslationBlock *tb;
84
    sparc_def_t *def;
85
    TCGv_i32 t32[3];
86
    int n_t32;
87
} DisasContext;
88

    
89
// This function uses non-native bit order
90
#define GET_FIELD(X, FROM, TO)                                  \
91
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92

    
93
// This function uses the order in the manuals, i.e. bit 0 is 2^0
94
#define GET_FIELD_SP(X, FROM, TO)               \
95
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
96

    
97
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
99

    
100
#ifdef TARGET_SPARC64
101
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103
#else
104
#define DFPREG(r) (r & 0x1e)
105
#define QFPREG(r) (r & 0x1c)
106
#endif
107

    
108
#define UA2005_HTRAP_MASK 0xff
109
#define V8_TRAP_MASK 0x7f
110

    
111
/*
 * Sign-extend the low LEN bits of X to a full signed 32-bit value.
 * E.g. sign_extend(0xff, 8) == -1 and sign_extend(0x7f, 8) == 0x7f.
 *
 * The previous implementation computed (x << (32 - len)) >> (32 - len)
 * directly on int, which is undefined behavior when x is negative
 * (left shift of a negative value, C99 6.5.7) and when len is 0
 * (shift by the full width).  Do the left shift in unsigned
 * arithmetic and only rely on the arithmetic right shift of int32_t,
 * which all supported hosts implement.
 */
static int sign_extend(int x, int len)
{
    if (len <= 0 || len >= 32) {
        /* Nothing to extend; also avoids undefined full-width shifts.  */
        return x;
    }
    uint32_t shift = 32 - (uint32_t)len;
    return (int32_t)((uint32_t)x << shift) >> shift;
}
116

    
117
#define IS_IMM (insn & (1<<13))
118

    
119
/* Mark the half of the FP register file containing single-reg RD as
   dirty in the FPRS register: bit 0 for regs < 32, bit 1 otherwise.
   On 32-bit SPARC there is no FPRS register, so this is a no-op.  */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
125

    
126
/* floating point registers moves */
127
/* Return a TCGv_i32 holding single-precision FP register SRC.
   Singles are stored packed in pairs inside the 64-bit cpu_fpr[]
   array: odd regs in the low half, even regs in the high half.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On a 32-bit host each i64 global is a pair of i32 halves; we can
       hand back the matching half directly.  */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Low half of the i64: reinterpret the register index as i32.  */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* High half: must be extracted into a temporary.  A *local*
           temp is used so the value survives branches in the caller;
           it is recorded in dc->t32[] to be freed at end of insn.  */
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
153

    
154
/* Store V into single-precision FP register DST (packed layout: odd
   regs in the low 32 bits of cpu_fpr[dst/2], even regs in the high
   32 bits), then mark the affected FP bank dirty in FPRS.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Reinterpret V as an i64 and deposit it into the proper half,
       preserving the other single of the pair.  */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
169

    
170
/* Return a scratch i32 suitable as destination for a single-precision
   FP result; the caller stores it with gen_store_fpr_F().  */
static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}
174

    
175
/* Return the i64 global backing double-precision FP register SRC.
   DFPREG() folds the SPARC64 extended register numbering into the
   flat index.  */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
180

    
181
/* Store V into double-precision FP register DST and mark the affected
   FP bank dirty in FPRS.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}
187

    
188
/* Return a scratch i64 suitable as destination for a double-precision
   FP result; the caller stores it with gen_store_fpr_D().  */
static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
192

    
193
/* Copy quad FP register pair SRC into the env scratch slot qt0
   (upper then lower 64-bit halves), where helpers operate on it.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
200

    
201
/* Copy quad FP register pair SRC into the env scratch slot qt1
   (upper then lower 64-bit halves), where helpers operate on it.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
208

    
209
/* Copy the env scratch slot qt0 back into quad FP register pair DST
   (the inverse of gen_op_load_fpr_QT0).  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
216

    
217
#ifdef TARGET_SPARC64
218
static void gen_move_Q(unsigned int rd, unsigned int rs)
219
{
220
    rd = QFPREG(rd);
221
    rs = QFPREG(rs);
222

    
223
    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
224
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
225
    gen_update_fprs_dirty(rd);
226
}
227
#endif
228

    
229
/* moves */
230
#ifdef CONFIG_USER_ONLY
231
#define supervisor(dc) 0
232
#ifdef TARGET_SPARC64
233
#define hypervisor(dc) 0
234
#endif
235
#else
236
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237
#ifdef TARGET_SPARC64
238
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
239
#else
240
#endif
241
#endif
242

    
243
#ifdef TARGET_SPARC64
244
#ifndef TARGET_ABI32
245
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
246
#else
247
#define AM_CHECK(dc) (1)
248
#endif
249
#endif
250

    
251
/* Apply the SPARC64 PSTATE.AM address mask: when 32-bit addressing is
   in effect (AM_CHECK), truncate ADDR to its low 32 bits in place.
   No-op on 32-bit SPARC.  */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
258

    
259
/* Load integer register REG into TN.  %g0 always reads as zero;
   globals %g1-%g7 live in TCG globals; windowed regs (>= 8) are
   loaded through the current-window pointer cpu_regwptr.  */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
269

    
270
/* Store TN into integer register REG.  Writes to %g0 are discarded;
   globals go to TCG globals; windowed regs (>= 8) are stored through
   the current-window pointer cpu_regwptr.  */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
280

    
281
/* End the TB with a jump to (PC, NPC).  If both targets lie on the
   same guest page as the current TB and we are not single-stepping,
   emit a chainable direct jump (goto_tb + exit_tb with the TB pointer
   tagged by tb_num); otherwise fall back to an unchained exit.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
302

    
303
// XXX suboptimal
/* The four helpers below extract one PSR condition-code bit (N, Z, V
   or C) from the 32-bit SRC into REG as 0 or 1: widen, shift the bit
   down, mask.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
331

    
332
/* Raise a TT_TOVF trap if the addition DST = SRC1 + SRC2 (already
   performed by the caller) produced signed 32-bit overflow.  Overflow
   occurred iff the operands have the same sign and the result's sign
   differs: ~(src1 ^ src2) & (src1 ^ dst), tested at bit 31.  */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* No overflow: skip the trap.  */
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
353

    
354
/* For tagged arithmetic (TADDccTV/TSUBccTV): raise TT_TOVF if either
   operand has a nonzero tag, i.e. any of its low two bits set.  */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
368

    
369
/* DST = SRC1 + immediate SRC2, leaving the operands and result in
   cpu_cc_src/cpu_cc_src2/cpu_cc_dst for lazy condition-code
   evaluation.  */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Register-register variant of the above.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
384

    
385
/* Recompute the 32-bit carry left by a previous add whose operands and
   result are still in cpu_cc_src/cpu_cc_dst: carry = (dst < src),
   unsigned.  Returns a freshly allocated i32 temp owned by the caller.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit CC values down to their low 32 bits.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
410

    
411
/* Recompute the 32-bit borrow left by a previous subtract whose
   operands are still in cpu_cc_src/cpu_cc_src2: borrow = (src1 < src2),
   unsigned.  Returns a freshly allocated i32 temp owned by the caller.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit CC values down to their low 32 bits.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
436

    
437
/* Emit ADDX/ADDXcc: DST = SRC1 + SRC2 + icc.C.  The input carry is
   derived from dc->cc_op, the operation that last set the condition
   codes, avoiding a helper call when it can be recomputed inline.
   If UPDATE_CC, the CC state is updated for CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit carry to the target-long width.  */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        /* Record operands and result for lazy CC evaluation.  */
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
512

    
513
/* TADDcc: tagged add, recording operands/result for lazy CC
   evaluation (tag overflow is folded into the CC computation).  */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TADDccTV: as above, but trap (TT_TOVF) on a set tag bit or on
   signed overflow instead of just setting V.  */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
530

    
531
/* Raise a TT_TOVF trap if the subtraction DST = SRC1 - SRC2 (already
   performed by the caller) produced signed 32-bit overflow.  Overflow
   occurred iff the operands have different signs and the result's sign
   differs from SRC1: (src1 ^ src2) & (src1 ^ dst), tested at bit 31.  */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* No overflow: skip the trap.  */
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
551

    
552
/* DST = SRC1 - immediate SRC2 with CC tracking.  A zero immediate is
   special-cased as CC_OP_LOGIC (result equals src1, flags computable
   from the value alone); otherwise CC_OP_SUB is recorded.  */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
567

    
568
/* DST = SRC1 - SRC2, leaving operands and result in the cpu_cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
575

    
576
/* Emit SUBX/SUBXcc: DST = SRC1 - SRC2 - icc.C.  Mirror image of
   gen_op_addx_int: the borrow is derived from dc->cc_op where
   possible, otherwise computed by helper.  If UPDATE_CC, the CC state
   is updated for CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit borrow to the target-long width.  */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        /* Record operands and result for lazy CC evaluation.  */
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
651

    
652
/* TSUBcc: tagged subtract, recording operands/result for lazy CC
   evaluation.  */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TSUBccTV: as above, but trap (TT_TOVF) on a set tag bit or on
   signed overflow instead of just setting V.  */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
669

    
670
/* MULScc: one step of the SPARC V8 multiply-step instruction.
   Shifts the Y register right by one (inserting bit 0 of src1),
   conditionally zeroes the addend based on the old Y LSB, shifts
   src1 right by one inserting (N ^ V), and adds.  Operands/result
   are left in cpu_cc_* for lazy CC evaluation.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
714

    
715
/* 32x32 -> 64-bit multiply for UMUL/SMUL.  The operands are truncated
   to 32 bits, extended (signed if SIGN_EXT, else unsigned) to 64 bits
   and multiplied; the high 32 bits of the product go to the Y
   register and the full product is truncated into DST.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y := high 32 bits of the 64-bit product.  */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
751

    
752
/* UMUL: unsigned 32x32 -> 64 multiply (high half to Y).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 -> 64 multiply (high half to Y).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
763

    
764
#ifdef TARGET_SPARC64
765
/* Raise a TT_DIV_ZERO trap if DIVISOR is zero; otherwise fall
   through.  */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
777

    
778
/* SDIVX: 64-bit signed division.  Traps on division by zero, and
   special-cases INT64_MIN / -1 (which would overflow the host div) to
   yield INT64_MIN as the architecture requires.  Local temps are used
   because the generated code contains branches.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    /* If not the INT64_MIN / -1 overflow case, do a real division.  */
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
800
#endif
801

    
802
/* Integer condition-code evaluators.  Each computes its branch
   condition from the PSR-format value SRC into DST as 0 or 1.
   The comment above each gives the condition in terms of the
   N/Z/V/C flags, per the SPARC Bicc condition encodings.  */

// 1 (branch always)
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z (equal)
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V) (less or equal, signed)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V (less, signed)
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z (less or equal, unsigned)
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C (carry set / less, unsigned)
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V (overflow set)
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0 (branch never)
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N (negative)
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z (not equal)
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V)) (greater, signed)
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V) (greater or equal, signed)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z) (greater, unsigned)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C (carry clear / greater or equal, unsigned)
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N (positive or zero)
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V (overflow clear)
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
920

    
921
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the FCC set selected by FCC_OFFSET from the FSR
   value SRC into REG as 0 or 1.  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Same for FCC1.  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Floating-point condition evaluators: each computes its FBfcc branch
   condition from the FSR value SRC into DST as 0 or 1, using the
   2-bit FCC encoding documented above.  */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1060

    
1061
// !3: !(FCC0 & FCC1)
1062
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1063
                                    unsigned int fcc_offset)
1064
{
1065
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1066
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1068
    tcg_gen_xori_tl(dst, dst, 0x1);
1069
}
1070

    
1071
/* Emit a two-way conditional exit: if r_cond != 0 go to pc1, else pc2.
   Both arms end the TB through goto_tb slots 0 and 1. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* condition true: branch target */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    /* condition false: fall-through path */
    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

/* Branch with annul bit set: when the condition is false the delay-slot
   instruction at pc2 is annulled, so the false arm skips straight to
   pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* taken: execute the delay slot at pc2, then jump to pc1 */
    gen_goto_tb(dc, 0, pc2, pc1);

    /* not taken (annulled): skip the delay slot */
    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

/* Select cpu_npc at runtime: npc1 if r_cond != 0, else npc2.  Used to
   resolve a pending JUMP_PC state without ending the TB. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
1118

    
1119
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        /* resolve the pending two-way npc into cpu_npc */
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

/* Make cpu_npc hold the architectural npc: resolve a pending JUMP_PC,
   or store a known-static npc value.  No-op if npc is already dynamic. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

/* Synchronize all translation-time CPU state (pc, condition codes, npc)
   into the CPU registers, e.g. before raising an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc, cond);
}

/* Advance pc to the current npc (delay-slot semantics), keeping track of
   whether the result is statically known or dynamic. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

/* Emit pc = npc; npc += 4 (sequential instruction flow). */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1169

    
1170
/* Evaluate integer condition code 'cond' (the 4-bit Bicc/BPcc field)
   against the selected condition-code register (icc, or xcc on sparc64
   when cc != 0), leaving a 0/1 result in r_dst.  Forces lazily-computed
   flags into cpu_psr first. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* materialize lazy condition codes before reading them */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1242

    
1243
/* Evaluate FP condition code 'cond' (the 4-bit FBfcc/FBPfcc field)
   against FSR.fccN, leaving a 0/1 result in r_dst.  'cc' selects which
   of the four fcc fields (sparc64); the offset is the fcc field's bit
   position relative to fcc0 — fcc1/2/3 live at FSR bits 32/34/36, and
   the base shift of 10 (fcc0's position) is subtracted because the
   gen_mov_reg_FCC* helpers add it back.  NOTE(review): offsets assume
   the V9 FSR layout — confirm against gen_mov_reg_FCC0. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1314

    
1315
#ifdef TARGET_SPARC64
1316
// Inverted logic
1317
static const int gen_tcg_cond_reg[8] = {
1318
    -1,
1319
    TCG_COND_NE,
1320
    TCG_COND_GT,
1321
    TCG_COND_GE,
1322
    -1,
1323
    TCG_COND_EQ,
1324
    TCG_COND_LE,
1325
    TCG_COND_LT,
1326
};
1327

    
1328
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1329
{
1330
    int l1;
1331

    
1332
    l1 = gen_new_label();
1333
    tcg_gen_movi_tl(r_dst, 0);
1334
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1335
    tcg_gen_movi_tl(r_dst, 1);
1336
    gen_set_label(l1);
1337
}
1338
#endif
1339

    
1340
/* Translate an integer conditional branch (Bicc/BPcc).
   offset: sign-extended displacement; cc: icc/xcc selector.
   Handles the annul bit and the always/never pseudo-conditions
   statically; real conditions evaluate into r_cond and either end the
   TB (annulling branch) or enter the two-target JUMP_PC state so the
   delay slot can be translated first. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: delay slot is not executed, jump directly */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the decision: translate the delay slot with npc in
               the two-valued JUMP_PC state */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1384

    
1385
/* Translate an FP conditional branch (FBfcc/FBPfcc).  Identical control
   structure to do_branch, but the condition comes from FSR.fcc via
   gen_fcond instead of the integer condition codes. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: delay slot is not executed, jump directly */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the decision: translate the delay slot with npc in
               the two-valued JUMP_PC state */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1429

    
1430
#ifdef TARGET_SPARC64
/* Translate a branch-on-register-condition (BPr, sparc64 only): the
   condition tests r_reg against zero.  No always/never encodings here,
   so the condition is always evaluated. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer the decision: translate the delay slot with npc in the
           two-valued JUMP_PC state */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1454

    
1455
/* FP compare dispatchers (sparc64): each routes to the helper variant
   that writes the selected FSR.fccN field.  The fcmpe* forms signal an
   exception on unordered operands; the q (quad) forms take their
   operands implicitly from QT0/QT1. */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}
1562

    
1563
#else

/* sparc32: only one fcc field exists, so fccno is ignored and each
   dispatcher forwards straight to the single helper. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
1595

    
1596
/* Raise an FP exception: set the given FTT bits in the FSR (clearing the
   old FTT field first) and raise TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

/* If the FPU is disabled (system emulation only), raise TT_NFPU_INSN and
   end the TB.  Returns 1 if the trap was emitted (caller must stop
   translating this insn), 0 otherwise. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* state must be committed before the helper can long-jump */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

/* Clear the accumulated IEEE current-exception bits and FTT field. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1628

    
1629
/* FP operation wrappers.  Naming: F = single, D = double, Q = quad;
   the letters give dest then source widths.  The "ne" variants call a
   generator that does not take cpu_env — presumably "no exception";
   verify against the helpers they are used with. */

/* rd = gen(rs), single precision, helper may raise FP exceptions. */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd = gen(rs), single precision, generator takes no env. */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd = gen(rs1, rs2), single precision. */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1668

    
1669
#ifdef TARGET_SPARC64
1670
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1672
{
1673
    TCGv_i32 dst, src1, src2;
1674

    
1675
    src1 = gen_load_fpr_F(dc, rs1);
1676
    src2 = gen_load_fpr_F(dc, rs2);
1677
    dst = gen_dest_fpr_F();
1678

    
1679
    gen(dst, src1, src2);
1680

    
1681
    gen_store_fpr_F(dc, rd, dst);
1682
}
1683
#endif
1684

    
1685
/* rd = gen(rs), double precision, helper may raise FP exceptions. */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* rd = gen(rs), double precision, generator takes no env. */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* rd = gen(rs1, rs2), double precision. */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1726

    
1727
#ifdef TARGET_SPARC64
1728
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1729
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1730
{
1731
    TCGv_i64 dst, src1, src2;
1732

    
1733
    src1 = gen_load_fpr_D(dc, rs1);
1734
    src2 = gen_load_fpr_D(dc, rs2);
1735
    dst = gen_dest_fpr_D();
1736

    
1737
    gen(dst, src1, src2);
1738

    
1739
    gen_store_fpr_D(dc, rd, dst);
1740
}
1741

    
1742
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1743
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1744
{
1745
    TCGv_i64 dst, src1, src2;
1746

    
1747
    src1 = gen_load_fpr_D(dc, rs1);
1748
    src2 = gen_load_fpr_D(dc, rs2);
1749
    dst = gen_dest_fpr_D();
1750

    
1751
    gen(dst, cpu_gsr, src1, src2);
1752

    
1753
    gen_store_fpr_D(dc, rd, dst);
1754
}
1755

    
1756
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1757
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1758
{
1759
    TCGv_i64 dst, src0, src1, src2;
1760

    
1761
    src1 = gen_load_fpr_D(dc, rs1);
1762
    src2 = gen_load_fpr_D(dc, rs2);
1763
    src0 = gen_load_fpr_D(dc, rd);
1764
    dst = gen_dest_fpr_D();
1765

    
1766
    gen(dst, src0, src1, src2);
1767

    
1768
    gen_store_fpr_D(dc, rd, dst);
1769
}
1770
#endif
1771

    
1772
/* Quad-precision ops pass operands implicitly through the QT0/QT1 env
   staging registers; results come back in QT0. */

/* rd = gen(rs), quad precision. */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
/* rd = gen(rs), quad precision, non-exception variant (sparc64). */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

/* rd = gen(rs1, rs2), quad precision (operands in QT0 and QT1). */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1807

    
1808
/* rd(double) = gen(rs1(single), rs2(single)), e.g. fsmuld. */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* rd(quad, via QT0) = gen(rs1(double), rs2(double)), e.g. fdmulq. */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1836

    
1837
#ifdef TARGET_SPARC64
/* rd(double) = gen(rs(single)), helper may raise FP exceptions. */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* rd(double) = gen(rs(single)) — same body as gen_fop_DF but compiled on
   all targets; note it still passes cpu_env despite the "ne" name. */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1866

    
1867
/* rd(single) = gen(rs(double)). */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd(single) = gen(rs(quad, via QT1)). */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* rd(double) = gen(rs(quad, via QT1)). */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1906

    
1907
/* rd(quad, via QT0) = gen(rs(single)). */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

/* rd(quad, via QT0) = gen(rs(double)). */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1932

    
1933
/* asi moves */
#ifdef TARGET_SPARC64
/* Return (as a new i32 temp, which the caller must free) the ASI for an
   alternate-space access: the runtime %asi register for the immediate
   form, otherwise the 8-bit ASI field of the instruction.  r_addr is
   currently unused. */
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
1949

    
1950
/* dst = load of 'size' bytes from alternate space at addr, sign- or
   zero-extended per 'sign'. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* Store 'size' bytes of src into alternate space at addr. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* FP register load from alternate space; the helper writes f[rd]. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* FP register store to alternate space; the helper reads f[rd]. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2000

    
2001
/* SWAPA: atomically (at translation level: load then store) exchange the
   32-bit word at addr in alternate space with dst; the old memory value
   ends up in dst via cpu_tmp64. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

/* LDDA: 64-bit load from alternate space into the register pair rd/rd+1;
   the helper writes both registers ('hi' is unused here). */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
2026

    
2027
/* STDA: store the register pair (hi = r[rd], low = r[rd+1]) as one
   64-bit value to alternate space at addr. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    /* concat(low, hi): low word in cpu_tmp0, high word from hi */
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* CASA: 32-bit compare-and-swap in alternate space.
   dst = mem[addr]; if it equaled r[rd] the helper stored val2. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

/* CASXA: 64-bit compare-and-swap in alternate space; the compare value
   r[rd] is staged through cpu_tmp64. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
2064

    
2065
#elif !defined(CONFIG_USER_ONLY)
2066

    
2067
/* sun4m/softmmu load from an alternate space: the ASI is an immediate
   field of the instruction (bits 19..26), not taken from %asi.  The
   helper returns a 64-bit value in cpu_tmp64 which is truncated to
   target-long width for dst.  */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    /* These temporaries are TCGv_i32, so release them through the
       typed API, matching the TARGET_SPARC64 variants above.  */
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
2081

    
2082
/* sun4m/softmmu store to an alternate space: the ASI is an immediate
   field of the instruction (bits 19..26).  src is widened to 64 bits
   in cpu_tmp64 for the helper call.  */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    /* TCGv_i32 temporaries: use the typed free, matching the
       TARGET_SPARC64 variants above.  */
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2093

    
2094
/* sun4m/softmmu SWAPA: atomically (at translation level: load then
   store) exchange dst with a 32-bit word in an alternate space.  The
   old memory value ends up in dst via cpu_tmp64.  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    /* TCGv_i32 temporaries: free with the typed API, matching the
       TARGET_SPARC64 variants above.  */
    tcg_temp_free_i32(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
2112

    
2113
/* sun4m/softmmu LDDA: load a 64-bit value from an alternate space and
   split it into the register pair — low word into rd+1, high word
   into rd (and into hi for the caller).  */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    /* TCGv_i32 temporaries: free with the typed API, matching the
       TARGET_SPARC64 variants above.  */
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
2130

    
2131
/* sun4m/softmmu STDA: pack the register pair rd/rd+1 (rd+1 low, hi
   high) into a 64-bit value and store it to an alternate space.  */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    /* TCGv_i32 temporaries: free with the typed API, matching the
       TARGET_SPARC64 variants above.  */
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2143
#endif
2144

    
2145
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2146
/* LDSTUBA: load an unsigned byte from an alternate space into dst,
   then store 0xff back to the same location (classic SPARC lock
   primitive).  */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 ones;
    TCGv_i32 asi32, sz32;

    /* Read the old byte value first.  */
    gen_ld_asi(dst, addr, insn, 1, 0);

    /* Then mark the location with all-ones.  */
    ones = tcg_const_i64(0xffULL);
    asi32 = tcg_const_i32(GET_FIELD(insn, 19, 26));
    sz32 = tcg_const_i32(1);
    gen_helper_st_asi(addr, ones, asi32, sz32);
    tcg_temp_free_i32(sz32);
    tcg_temp_free_i32(asi32);
    tcg_temp_free_i64(ones);
}
2161
#endif
2162

    
2163
/* Resolve the rs1 operand of an instruction.  %g0 reads as zero,
   %g1-%g7 come straight from the global TCG registers, and window
   registers are loaded from the register window pointer into the
   caller-supplied scratch temporary 'def'.  */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);

    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
        return def;
    }
    if (rs1 < 8) {
        return cpu_gregs[rs1];
    }
    tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return def;
}
2178

    
2179
/* Resolve the second operand of an instruction: either the
   sign-extended 13-bit immediate (when the i bit is set) or the rs2
   register, using the same %g0 / global / window-register resolution
   as get_src1().  */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    if (IS_IMM) {
        /* Immediate form: materialize simm13 in the scratch temp.  */
        tcg_gen_movi_tl(def, GET_FIELDs(insn, 19, 31));
        return def;
    }

    /* Register form.  */
    {
        unsigned int rs2 = GET_FIELD(insn, 27, 31);

        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
            return def;
        }
        if (rs2 < 8) {
            return cpu_gregs[rs2];
        }
        tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        return def;
    }
}
2198

    
2199
#ifdef TARGET_SPARC64
2200
/* Point r_tsptr at env->ts[env->tl & MAXTL_MASK], i.e. the trap
   state entry for the current trap level.  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();
    TCGv_ptr r_off;

    /* r_tl = env->tl, clamped into [0 .. MAXTL_MASK]; MAXTL_MASK must
       be a power of two minus one for the mask to be valid.  */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* Scale the level into a byte offset (reusing r_tl) and start
       r_tsptr at the base of env->ts.  */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* r_tsptr += offset, widening the 32-bit offset to pointer size.  */
    r_off = tcg_temp_new_ptr();
    tcg_gen_ext_i32_ptr(r_off, r_tl);
    tcg_gen_add_ptr(r_tsptr, r_tsptr, r_off);
    tcg_temp_free_ptr(r_off);

    tcg_temp_free_i32(r_tl);
}
2224
#endif
2225

    
2226
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
2227
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2228
        goto illegal_insn;
2229
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2230
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2231
        goto nfpu_insn;
2232

    
2233
/* before an instruction, dc->pc must be static */
2234
static void disas_sparc_insn(DisasContext * dc)
2235
{
2236
    unsigned int insn, opc, rs1, rs2, rd;
2237
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2238
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2239
    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2240
    target_long simm;
2241

    
2242
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2243
        tcg_gen_debug_insn_start(dc->pc);
2244
    insn = ldl_code(dc->pc);
2245
    opc = GET_FIELD(insn, 0, 1);
2246

    
2247
    rd = GET_FIELD(insn, 2, 6);
2248

    
2249
    cpu_tmp1 = cpu_src1 = tcg_temp_new();
2250
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
2251

    
2252
    switch (opc) {
2253
    case 0:                     /* branches/sethi */
2254
        {
2255
            unsigned int xop = GET_FIELD(insn, 7, 9);
2256
            int32_t target;
2257
            switch (xop) {
2258
#ifdef TARGET_SPARC64
2259
            case 0x1:           /* V9 BPcc */
2260
                {
2261
                    int cc;
2262

    
2263
                    target = GET_FIELD_SP(insn, 0, 18);
2264
                    target = sign_extend(target, 19);
2265
                    target <<= 2;
2266
                    cc = GET_FIELD_SP(insn, 20, 21);
2267
                    if (cc == 0)
2268
                        do_branch(dc, target, insn, 0, cpu_cond);
2269
                    else if (cc == 2)
2270
                        do_branch(dc, target, insn, 1, cpu_cond);
2271
                    else
2272
                        goto illegal_insn;
2273
                    goto jmp_insn;
2274
                }
2275
            case 0x3:           /* V9 BPr */
2276
                {
2277
                    target = GET_FIELD_SP(insn, 0, 13) |
2278
                        (GET_FIELD_SP(insn, 20, 21) << 14);
2279
                    target = sign_extend(target, 16);
2280
                    target <<= 2;
2281
                    cpu_src1 = get_src1(insn, cpu_src1);
2282
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2283
                    goto jmp_insn;
2284
                }
2285
            case 0x5:           /* V9 FBPcc */
2286
                {
2287
                    int cc = GET_FIELD_SP(insn, 20, 21);
2288
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2289
                        goto jmp_insn;
2290
                    target = GET_FIELD_SP(insn, 0, 18);
2291
                    target = sign_extend(target, 19);
2292
                    target <<= 2;
2293
                    do_fbranch(dc, target, insn, cc, cpu_cond);
2294
                    goto jmp_insn;
2295
                }
2296
#else
2297
            case 0x7:           /* CBN+x */
2298
                {
2299
                    goto ncp_insn;
2300
                }
2301
#endif
2302
            case 0x2:           /* BN+x */
2303
                {
2304
                    target = GET_FIELD(insn, 10, 31);
2305
                    target = sign_extend(target, 22);
2306
                    target <<= 2;
2307
                    do_branch(dc, target, insn, 0, cpu_cond);
2308
                    goto jmp_insn;
2309
                }
2310
            case 0x6:           /* FBN+x */
2311
                {
2312
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2313
                        goto jmp_insn;
2314
                    target = GET_FIELD(insn, 10, 31);
2315
                    target = sign_extend(target, 22);
2316
                    target <<= 2;
2317
                    do_fbranch(dc, target, insn, 0, cpu_cond);
2318
                    goto jmp_insn;
2319
                }
2320
            case 0x4:           /* SETHI */
2321
                if (rd) { // nop
2322
                    uint32_t value = GET_FIELD(insn, 10, 31);
2323
                    TCGv r_const;
2324

    
2325
                    r_const = tcg_const_tl(value << 10);
2326
                    gen_movl_TN_reg(rd, r_const);
2327
                    tcg_temp_free(r_const);
2328
                }
2329
                break;
2330
            case 0x0:           /* UNIMPL */
2331
            default:
2332
                goto illegal_insn;
2333
            }
2334
            break;
2335
        }
2336
        break;
2337
    case 1:                     /*CALL*/
2338
        {
2339
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
2340
            TCGv r_const;
2341

    
2342
            r_const = tcg_const_tl(dc->pc);
2343
            gen_movl_TN_reg(15, r_const);
2344
            tcg_temp_free(r_const);
2345
            target += dc->pc;
2346
            gen_mov_pc_npc(dc, cpu_cond);
2347
            dc->npc = target;
2348
        }
2349
        goto jmp_insn;
2350
    case 2:                     /* FPU & Logical Operations */
2351
        {
2352
            unsigned int xop = GET_FIELD(insn, 7, 12);
2353
            if (xop == 0x3a) {  /* generate trap */
2354
                int cond;
2355

    
2356
                cpu_src1 = get_src1(insn, cpu_src1);
2357
                if (IS_IMM) {
2358
                    rs2 = GET_FIELD(insn, 25, 31);
2359
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2360
                } else {
2361
                    rs2 = GET_FIELD(insn, 27, 31);
2362
                    if (rs2 != 0) {
2363
                        gen_movl_reg_TN(rs2, cpu_src2);
2364
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2365
                    } else
2366
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
2367
                }
2368

    
2369
                cond = GET_FIELD(insn, 3, 6);
2370
                if (cond == 0x8) { /* Trap Always */
2371
                    save_state(dc, cpu_cond);
2372
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
2373
                        supervisor(dc))
2374
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2375
                    else
2376
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2377
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2378
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2379

    
2380
                    if (rs2 == 0 &&
2381
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2382

    
2383
                        gen_helper_shutdown();
2384

    
2385
                    } else {
2386
                        gen_helper_raise_exception(cpu_env, cpu_tmp32);
2387
                    }
2388
                } else if (cond != 0) {
2389
                    TCGv r_cond = tcg_temp_new();
2390
                    int l1;
2391
#ifdef TARGET_SPARC64
2392
                    /* V9 icc/xcc */
2393
                    int cc = GET_FIELD_SP(insn, 11, 12);
2394

    
2395
                    save_state(dc, cpu_cond);
2396
                    if (cc == 0)
2397
                        gen_cond(r_cond, 0, cond, dc);
2398
                    else if (cc == 2)
2399
                        gen_cond(r_cond, 1, cond, dc);
2400
                    else
2401
                        goto illegal_insn;
2402
#else
2403
                    save_state(dc, cpu_cond);
2404
                    gen_cond(r_cond, 0, cond, dc);
2405
#endif
2406
                    l1 = gen_new_label();
2407
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2408

    
2409
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
2410
                        supervisor(dc))
2411
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2412
                    else
2413
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2414
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2415
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2416
                    gen_helper_raise_exception(cpu_env, cpu_tmp32);
2417

    
2418
                    gen_set_label(l1);
2419
                    tcg_temp_free(r_cond);
2420
                }
2421
                gen_op_next_insn();
2422
                tcg_gen_exit_tb(0);
2423
                dc->is_br = 1;
2424
                goto jmp_insn;
2425
            } else if (xop == 0x28) {
2426
                rs1 = GET_FIELD(insn, 13, 17);
2427
                switch(rs1) {
2428
                case 0: /* rdy */
2429
#ifndef TARGET_SPARC64
2430
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
2431
                                       manual, rdy on the microSPARC
2432
                                       II */
2433
                case 0x0f:          /* stbar in the SPARCv8 manual,
2434
                                       rdy on the microSPARC II */
2435
                case 0x10 ... 0x1f: /* implementation-dependent in the
2436
                                       SPARCv8 manual, rdy on the
2437
                                       microSPARC II */
2438
                    /* Read Asr17 */
2439
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2440
                        TCGv r_const;
2441

    
2442
                        /* Read Asr17 for a Leon3 monoprocessor */
2443
                        r_const = tcg_const_tl((1 << 8)
2444
                                               | (dc->def->nwindows - 1));
2445
                        gen_movl_TN_reg(rd, r_const);
2446
                        tcg_temp_free(r_const);
2447
                        break;
2448
                    }
2449
#endif
2450
                    gen_movl_TN_reg(rd, cpu_y);
2451
                    break;
2452
#ifdef TARGET_SPARC64
2453
                case 0x2: /* V9 rdccr */
2454
                    gen_helper_compute_psr(cpu_env);
2455
                    gen_helper_rdccr(cpu_dst, cpu_env);
2456
                    gen_movl_TN_reg(rd, cpu_dst);
2457
                    break;
2458
                case 0x3: /* V9 rdasi */
2459
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2460
                    gen_movl_TN_reg(rd, cpu_dst);
2461
                    break;
2462
                case 0x4: /* V9 rdtick */
2463
                    {
2464
                        TCGv_ptr r_tickptr;
2465

    
2466
                        r_tickptr = tcg_temp_new_ptr();
2467
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2468
                                       offsetof(CPUState, tick));
2469
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
2470
                        tcg_temp_free_ptr(r_tickptr);
2471
                        gen_movl_TN_reg(rd, cpu_dst);
2472
                    }
2473
                    break;
2474
                case 0x5: /* V9 rdpc */
2475
                    {
2476
                        TCGv r_const;
2477

    
2478
                        r_const = tcg_const_tl(dc->pc);
2479
                        gen_movl_TN_reg(rd, r_const);
2480
                        tcg_temp_free(r_const);
2481
                    }
2482
                    break;
2483
                case 0x6: /* V9 rdfprs */
2484
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2485
                    gen_movl_TN_reg(rd, cpu_dst);
2486
                    break;
2487
                case 0xf: /* V9 membar */
2488
                    break; /* no effect */
2489
                case 0x13: /* Graphics Status */
2490
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2491
                        goto jmp_insn;
2492
                    gen_movl_TN_reg(rd, cpu_gsr);
2493
                    break;
2494
                case 0x16: /* Softint */
2495
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2496
                    gen_movl_TN_reg(rd, cpu_dst);
2497
                    break;
2498
                case 0x17: /* Tick compare */
2499
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
2500
                    break;
2501
                case 0x18: /* System tick */
2502
                    {
2503
                        TCGv_ptr r_tickptr;
2504

    
2505
                        r_tickptr = tcg_temp_new_ptr();
2506
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2507
                                       offsetof(CPUState, stick));
2508
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
2509
                        tcg_temp_free_ptr(r_tickptr);
2510
                        gen_movl_TN_reg(rd, cpu_dst);
2511
                    }
2512
                    break;
2513
                case 0x19: /* System tick compare */
2514
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
2515
                    break;
2516
                case 0x10: /* Performance Control */
2517
                case 0x11: /* Performance Instrumentation Counter */
2518
                case 0x12: /* Dispatch Control */
2519
                case 0x14: /* Softint set, WO */
2520
                case 0x15: /* Softint clear, WO */
2521
#endif
2522
                default:
2523
                    goto illegal_insn;
2524
                }
2525
#if !defined(CONFIG_USER_ONLY)
2526
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2527
#ifndef TARGET_SPARC64
2528
                if (!supervisor(dc))
2529
                    goto priv_insn;
2530
                gen_helper_compute_psr(cpu_env);
2531
                dc->cc_op = CC_OP_FLAGS;
2532
                gen_helper_rdpsr(cpu_dst, cpu_env);
2533
#else
2534
                CHECK_IU_FEATURE(dc, HYPV);
2535
                if (!hypervisor(dc))
2536
                    goto priv_insn;
2537
                rs1 = GET_FIELD(insn, 13, 17);
2538
                switch (rs1) {
2539
                case 0: // hpstate
2540
                    // gen_op_rdhpstate();
2541
                    break;
2542
                case 1: // htstate
2543
                    // gen_op_rdhtstate();
2544
                    break;
2545
                case 3: // hintp
2546
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2547
                    break;
2548
                case 5: // htba
2549
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
2550
                    break;
2551
                case 6: // hver
2552
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
2553
                    break;
2554
                case 31: // hstick_cmpr
2555
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2556
                    break;
2557
                default:
2558
                    goto illegal_insn;
2559
                }
2560
#endif
2561
                gen_movl_TN_reg(rd, cpu_dst);
2562
                break;
2563
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2564
                if (!supervisor(dc))
2565
                    goto priv_insn;
2566
#ifdef TARGET_SPARC64
2567
                rs1 = GET_FIELD(insn, 13, 17);
2568
                switch (rs1) {
2569
                case 0: // tpc
2570
                    {
2571
                        TCGv_ptr r_tsptr;
2572

    
2573
                        r_tsptr = tcg_temp_new_ptr();
2574
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2575
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2576
                                      offsetof(trap_state, tpc));
2577
                        tcg_temp_free_ptr(r_tsptr);
2578
                    }
2579
                    break;
2580
                case 1: // tnpc
2581
                    {
2582
                        TCGv_ptr r_tsptr;
2583

    
2584
                        r_tsptr = tcg_temp_new_ptr();
2585
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2586
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2587
                                      offsetof(trap_state, tnpc));
2588
                        tcg_temp_free_ptr(r_tsptr);
2589
                    }
2590
                    break;
2591
                case 2: // tstate
2592
                    {
2593
                        TCGv_ptr r_tsptr;
2594

    
2595
                        r_tsptr = tcg_temp_new_ptr();
2596
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2597
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2598
                                      offsetof(trap_state, tstate));
2599
                        tcg_temp_free_ptr(r_tsptr);
2600
                    }
2601
                    break;
2602
                case 3: // tt
2603
                    {
2604
                        TCGv_ptr r_tsptr;
2605

    
2606
                        r_tsptr = tcg_temp_new_ptr();
2607
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2608
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2609
                                       offsetof(trap_state, tt));
2610
                        tcg_temp_free_ptr(r_tsptr);
2611
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2612
                    }
2613
                    break;
2614
                case 4: // tick
2615
                    {
2616
                        TCGv_ptr r_tickptr;
2617

    
2618
                        r_tickptr = tcg_temp_new_ptr();
2619
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2620
                                       offsetof(CPUState, tick));
2621
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2622
                        gen_movl_TN_reg(rd, cpu_tmp0);
2623
                        tcg_temp_free_ptr(r_tickptr);
2624
                    }
2625
                    break;
2626
                case 5: // tba
2627
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2628
                    break;
2629
                case 6: // pstate
2630
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2631
                                   offsetof(CPUSPARCState, pstate));
2632
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2633
                    break;
2634
                case 7: // tl
2635
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2636
                                   offsetof(CPUSPARCState, tl));
2637
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2638
                    break;
2639
                case 8: // pil
2640
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2641
                                   offsetof(CPUSPARCState, psrpil));
2642
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2643
                    break;
2644
                case 9: // cwp
2645
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
2646
                    break;
2647
                case 10: // cansave
2648
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2649
                                   offsetof(CPUSPARCState, cansave));
2650
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2651
                    break;
2652
                case 11: // canrestore
2653
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2654
                                   offsetof(CPUSPARCState, canrestore));
2655
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2656
                    break;
2657
                case 12: // cleanwin
2658
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2659
                                   offsetof(CPUSPARCState, cleanwin));
2660
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2661
                    break;
2662
                case 13: // otherwin
2663
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2664
                                   offsetof(CPUSPARCState, otherwin));
2665
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2666
                    break;
2667
                case 14: // wstate
2668
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2669
                                   offsetof(CPUSPARCState, wstate));
2670
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2671
                    break;
2672
                case 16: // UA2005 gl
2673
                    CHECK_IU_FEATURE(dc, GL);
2674
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2675
                                   offsetof(CPUSPARCState, gl));
2676
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2677
                    break;
2678
                case 26: // UA2005 strand status
2679
                    CHECK_IU_FEATURE(dc, HYPV);
2680
                    if (!hypervisor(dc))
2681
                        goto priv_insn;
2682
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2683
                    break;
2684
                case 31: // ver
2685
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2686
                    break;
2687
                case 15: // fq
2688
                default:
2689
                    goto illegal_insn;
2690
                }
2691
#else
2692
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2693
#endif
2694
                gen_movl_TN_reg(rd, cpu_tmp0);
2695
                break;
2696
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2697
#ifdef TARGET_SPARC64
2698
                save_state(dc, cpu_cond);
2699
                gen_helper_flushw(cpu_env);
2700
#else
2701
                if (!supervisor(dc))
2702
                    goto priv_insn;
2703
                gen_movl_TN_reg(rd, cpu_tbr);
2704
#endif
2705
                break;
2706
#endif
2707
            } else if (xop == 0x34) {   /* FPU Operations */
2708
                if (gen_trap_ifnofpu(dc, cpu_cond))
2709
                    goto jmp_insn;
2710
                gen_op_clear_ieee_excp_and_FTT();
2711
                rs1 = GET_FIELD(insn, 13, 17);
2712
                rs2 = GET_FIELD(insn, 27, 31);
2713
                xop = GET_FIELD(insn, 18, 26);
2714
                save_state(dc, cpu_cond);
2715
                switch (xop) {
2716
                case 0x1: /* fmovs */
2717
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2718
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
2719
                    break;
2720
                case 0x5: /* fnegs */
2721
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2722
                    break;
2723
                case 0x9: /* fabss */
2724
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2725
                    break;
2726
                case 0x29: /* fsqrts */
2727
                    CHECK_FPU_FEATURE(dc, FSQRT);
2728
                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2729
                    break;
2730
                case 0x2a: /* fsqrtd */
2731
                    CHECK_FPU_FEATURE(dc, FSQRT);
2732
                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2733
                    break;
2734
                case 0x2b: /* fsqrtq */
2735
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2736
                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2737
                    break;
2738
                case 0x41: /* fadds */
2739
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2740
                    break;
2741
                case 0x42: /* faddd */
2742
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2743
                    break;
2744
                case 0x43: /* faddq */
2745
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2746
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2747
                    break;
2748
                case 0x45: /* fsubs */
2749
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2750
                    break;
2751
                case 0x46: /* fsubd */
2752
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2753
                    break;
2754
                case 0x47: /* fsubq */
2755
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2756
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2757
                    break;
2758
                case 0x49: /* fmuls */
2759
                    CHECK_FPU_FEATURE(dc, FMUL);
2760
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2761
                    break;
2762
                case 0x4a: /* fmuld */
2763
                    CHECK_FPU_FEATURE(dc, FMUL);
2764
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2765
                    break;
2766
                case 0x4b: /* fmulq */
2767
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2768
                    CHECK_FPU_FEATURE(dc, FMUL);
2769
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2770
                    break;
2771
                case 0x4d: /* fdivs */
2772
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2773
                    break;
2774
                case 0x4e: /* fdivd */
2775
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2776
                    break;
2777
                case 0x4f: /* fdivq */
2778
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2779
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2780
                    break;
2781
                case 0x69: /* fsmuld */
2782
                    CHECK_FPU_FEATURE(dc, FSMULD);
2783
                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2784
                    break;
2785
                case 0x6e: /* fdmulq */
2786
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2787
                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2788
                    break;
2789
                case 0xc4: /* fitos */
2790
                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2791
                    break;
2792
                case 0xc6: /* fdtos */
2793
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2794
                    break;
2795
                case 0xc7: /* fqtos */
2796
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2797
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2798
                    break;
2799
                case 0xc8: /* fitod */
2800
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2801
                    break;
2802
                case 0xc9: /* fstod */
2803
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2804
                    break;
2805
                case 0xcb: /* fqtod */
2806
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2807
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2808
                    break;
2809
                case 0xcc: /* fitoq */
2810
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2811
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2812
                    break;
2813
                case 0xcd: /* fstoq */
2814
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2815
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2816
                    break;
2817
                case 0xce: /* fdtoq */
2818
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2819
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2820
                    break;
2821
                case 0xd1: /* fstoi */
2822
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2823
                    break;
2824
                case 0xd2: /* fdtoi */
2825
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2826
                    break;
2827
                case 0xd3: /* fqtoi */
2828
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2829
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2830
                    break;
2831
#ifdef TARGET_SPARC64
2832
                case 0x2: /* V9 fmovd */
2833
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2834
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
2835
                    break;
2836
                case 0x3: /* V9 fmovq */
2837
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2838
                    gen_move_Q(rd, rs2);
2839
                    break;
2840
                case 0x6: /* V9 fnegd */
2841
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
2842
                    break;
2843
                case 0x7: /* V9 fnegq */
2844
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2845
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
2846
                    break;
2847
                case 0xa: /* V9 fabsd */
2848
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
2849
                    break;
2850
                case 0xb: /* V9 fabsq */
2851
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2852
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
2853
                    break;
2854
                case 0x81: /* V9 fstox */
2855
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
2856
                    break;
2857
                case 0x82: /* V9 fdtox */
2858
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
2859
                    break;
2860
                case 0x83: /* V9 fqtox */
2861
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2862
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
2863
                    break;
2864
                case 0x84: /* V9 fxtos */
2865
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
2866
                    break;
2867
                case 0x88: /* V9 fxtod */
2868
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
2869
                    break;
2870
                case 0x8c: /* V9 fxtoq */
2871
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2872
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
2873
                    break;
2874
#endif
2875
                default:
2876
                    goto illegal_insn;
2877
                }
2878
            } else if (xop == 0x35) {   /* FPU Operations */
2879
#ifdef TARGET_SPARC64
2880
                int cond;
2881
#endif
2882
                if (gen_trap_ifnofpu(dc, cpu_cond))
2883
                    goto jmp_insn;
2884
                gen_op_clear_ieee_excp_and_FTT();
2885
                rs1 = GET_FIELD(insn, 13, 17);
2886
                rs2 = GET_FIELD(insn, 27, 31);
2887
                xop = GET_FIELD(insn, 18, 26);
2888
                save_state(dc, cpu_cond);
2889
#ifdef TARGET_SPARC64
2890
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2891
                    int l1;
2892

    
2893
                    l1 = gen_new_label();
2894
                    cond = GET_FIELD_SP(insn, 14, 17);
2895
                    cpu_src1 = get_src1(insn, cpu_src1);
2896
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2897
                                       0, l1);
2898
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2899
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
2900
                    gen_set_label(l1);
2901
                    break;
2902
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2903
                    int l1;
2904

    
2905
                    l1 = gen_new_label();
2906
                    cond = GET_FIELD_SP(insn, 14, 17);
2907
                    cpu_src1 = get_src1(insn, cpu_src1);
2908
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2909
                                       0, l1);
2910
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2911
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
2912
                    gen_set_label(l1);
2913
                    break;
2914
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2915
                    int l1;
2916

    
2917
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2918
                    l1 = gen_new_label();
2919
                    cond = GET_FIELD_SP(insn, 14, 17);
2920
                    cpu_src1 = get_src1(insn, cpu_src1);
2921
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2922
                                       0, l1);
2923
                    gen_move_Q(rd, rs2);
2924
                    gen_set_label(l1);
2925
                    break;
2926
                }
2927
#endif
2928
                switch (xop) {
2929
#ifdef TARGET_SPARC64
2930
/* FMOVScc on %fccN: emit TCG ops that copy single-precision FP reg rs2
   into rd when FP condition `cond` of condition-code field `fcc` holds;
   rd is left untouched otherwise (branch over the store).  Uses the
   surrounding decoder's locals: insn, cond, rd, rs2, dc, cpu_src1_32. */
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        /* skip the move when the condition is false */ \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
                        gen_store_fpr_F(dc, rd, cpu_src1_32);           \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
2946
/* FMOVDcc on %fccN: same shape as FMOVSCC but moves a double-precision
   FP register (rs2 -> rd) when FP condition `cond` of %fcc`fcc` holds. */
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        /* skip the move when the condition is false */ \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_64 = gen_load_fpr_D(dc, rs2);          \
                        gen_store_fpr_D(dc, rd, cpu_src1_64);           \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
2962
/* FMOVQcc on %fccN: quad-precision variant; gen_move_Q copies the whole
   quad register pair rs2 -> rd when FP condition `cond` of %fcc`fcc`
   holds, otherwise the branch skips the move entirely. */
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        /* skip the move when the condition is false */ \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        gen_move_Q(rd, rs2);                            \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
2977
                    case 0x001: /* V9 fmovscc %fcc0 */
2978
                        FMOVSCC(0);
2979
                        break;
2980
                    case 0x002: /* V9 fmovdcc %fcc0 */
2981
                        FMOVDCC(0);
2982
                        break;
2983
                    case 0x003: /* V9 fmovqcc %fcc0 */
2984
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2985
                        FMOVQCC(0);
2986
                        break;
2987
                    case 0x041: /* V9 fmovscc %fcc1 */
2988
                        FMOVSCC(1);
2989
                        break;
2990
                    case 0x042: /* V9 fmovdcc %fcc1 */
2991
                        FMOVDCC(1);
2992
                        break;
2993
                    case 0x043: /* V9 fmovqcc %fcc1 */
2994
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2995
                        FMOVQCC(1);
2996
                        break;
2997
                    case 0x081: /* V9 fmovscc %fcc2 */
2998
                        FMOVSCC(2);
2999
                        break;
3000
                    case 0x082: /* V9 fmovdcc %fcc2 */
3001
                        FMOVDCC(2);
3002
                        break;
3003
                    case 0x083: /* V9 fmovqcc %fcc2 */
3004
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3005
                        FMOVQCC(2);
3006
                        break;
3007
                    case 0x0c1: /* V9 fmovscc %fcc3 */
3008
                        FMOVSCC(3);
3009
                        break;
3010
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
3011
                        FMOVDCC(3);
3012
                        break;
3013
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
3014
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3015
                        FMOVQCC(3);
3016
                        break;
3017
#undef FMOVSCC
3018
#undef FMOVDCC
3019
#undef FMOVQCC
3020
/* Integer-cc redefinition of FMOVSCC (previous fcc variant was #undef'd):
   conditionally move single-precision rs2 -> rd when integer condition
   `cond` holds on %icc (icc == 0) or %xcc (icc == 1).  gen_cond needs dc
   to resolve the lazily-evaluated condition codes. */
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        /* skip the move when the condition is false */ \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
                        gen_store_fpr_F(dc, rd, cpu_src1_32);           \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
3036
/* Integer-cc redefinition of FMOVDCC: conditionally move double-precision
   rs2 -> rd when integer condition `cond` holds on %icc/%xcc.
   Fix: dropped the explicit gen_update_fprs_dirty(DFPREG(rd)) that
   followed gen_store_fpr_D — no other gen_store_fpr_D call site in this
   file performs it (fmovd, fmovdr, and the %fcc FMOVDCC all rely on
   gen_store_fpr_D marking FPRS dirty itself), so the call was redundant
   and inconsistent. */
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        /* skip the move when the condition is false */ \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_64 = gen_load_fpr_D(dc, rs2);          \
                        gen_store_fpr_D(dc, rd, cpu_src1_64);           \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
3053
/* Integer-cc redefinition of FMOVQCC: conditionally move the quad
   register pair rs2 -> rd (via gen_move_Q) when integer condition
   `cond` holds on %icc (icc == 0) or %xcc (icc == 1). */
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        /* skip the move when the condition is false */ \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        gen_move_Q(rd, rs2);                            \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
3068

    
3069
                    case 0x101: /* V9 fmovscc %icc */
3070
                        FMOVSCC(0);
3071
                        break;
3072
                    case 0x102: /* V9 fmovdcc %icc */
3073
                        FMOVDCC(0);
3074
                        break;
3075
                    case 0x103: /* V9 fmovqcc %icc */
3076
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3077
                        FMOVQCC(0);
3078
                        break;
3079
                    case 0x181: /* V9 fmovscc %xcc */
3080
                        FMOVSCC(1);
3081
                        break;
3082
                    case 0x182: /* V9 fmovdcc %xcc */
3083
                        FMOVDCC(1);
3084
                        break;
3085
                    case 0x183: /* V9 fmovqcc %xcc */
3086
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3087
                        FMOVQCC(1);
3088
                        break;
3089
#undef FMOVSCC
3090
#undef FMOVDCC
3091
#undef FMOVQCC
3092
#endif
3093
                    case 0x51: /* fcmps, V9 %fcc */
3094
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3095
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3096
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3097
                        break;
3098
                    case 0x52: /* fcmpd, V9 %fcc */
3099
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3100
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3101
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3102
                        break;
3103
                    case 0x53: /* fcmpq, V9 %fcc */
3104
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3105
                        gen_op_load_fpr_QT0(QFPREG(rs1));
3106
                        gen_op_load_fpr_QT1(QFPREG(rs2));
3107
                        gen_op_fcmpq(rd & 3);
3108
                        break;
3109
                    case 0x55: /* fcmpes, V9 %fcc */
3110
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3111
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3112
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3113
                        break;
3114
                    case 0x56: /* fcmped, V9 %fcc */
3115
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3116
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3117
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3118
                        break;
3119
                    case 0x57: /* fcmpeq, V9 %fcc */
3120
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3121
                        gen_op_load_fpr_QT0(QFPREG(rs1));
3122
                        gen_op_load_fpr_QT1(QFPREG(rs2));
3123
                        gen_op_fcmpeq(rd & 3);
3124
                        break;
3125
                    default:
3126
                        goto illegal_insn;
3127
                }
3128
            } else if (xop == 0x2) {
3129
                // clr/mov shortcut
3130

    
3131
                rs1 = GET_FIELD(insn, 13, 17);
3132
                if (rs1 == 0) {
3133
                    // or %g0, x, y -> mov T0, x; mov y, T0
3134
                    if (IS_IMM) {       /* immediate */
3135
                        TCGv r_const;
3136

    
3137
                        simm = GET_FIELDs(insn, 19, 31);
3138
                        r_const = tcg_const_tl(simm);
3139
                        gen_movl_TN_reg(rd, r_const);
3140
                        tcg_temp_free(r_const);
3141
                    } else {            /* register */
3142
                        rs2 = GET_FIELD(insn, 27, 31);
3143
                        gen_movl_reg_TN(rs2, cpu_dst);
3144
                        gen_movl_TN_reg(rd, cpu_dst);
3145
                    }
3146
                } else {
3147
                    cpu_src1 = get_src1(insn, cpu_src1);
3148
                    if (IS_IMM) {       /* immediate */
3149
                        simm = GET_FIELDs(insn, 19, 31);
3150
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3151
                        gen_movl_TN_reg(rd, cpu_dst);
3152
                    } else {            /* register */
3153
                        // or x, %g0, y -> mov T1, x; mov y, T1
3154
                        rs2 = GET_FIELD(insn, 27, 31);
3155
                        if (rs2 != 0) {
3156
                            gen_movl_reg_TN(rs2, cpu_src2);
3157
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3158
                            gen_movl_TN_reg(rd, cpu_dst);
3159
                        } else
3160
                            gen_movl_TN_reg(rd, cpu_src1);
3161
                    }
3162
                }
3163
#ifdef TARGET_SPARC64
3164
            } else if (xop == 0x25) { /* sll, V9 sllx */
3165
                cpu_src1 = get_src1(insn, cpu_src1);
3166
                if (IS_IMM) {   /* immediate */
3167
                    simm = GET_FIELDs(insn, 20, 31);
3168
                    if (insn & (1 << 12)) {
3169
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3170
                    } else {
3171
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3172
                    }
3173
                } else {                /* register */
3174
                    rs2 = GET_FIELD(insn, 27, 31);
3175
                    gen_movl_reg_TN(rs2, cpu_src2);
3176
                    if (insn & (1 << 12)) {
3177
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3178
                    } else {
3179
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3180
                    }
3181
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3182
                }
3183
                gen_movl_TN_reg(rd, cpu_dst);
3184
            } else if (xop == 0x26) { /* srl, V9 srlx */
3185
                cpu_src1 = get_src1(insn, cpu_src1);
3186
                if (IS_IMM) {   /* immediate */
3187
                    simm = GET_FIELDs(insn, 20, 31);
3188
                    if (insn & (1 << 12)) {
3189
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3190
                    } else {
3191
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3192
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3193
                    }
3194
                } else {                /* register */
3195
                    rs2 = GET_FIELD(insn, 27, 31);
3196
                    gen_movl_reg_TN(rs2, cpu_src2);
3197
                    if (insn & (1 << 12)) {
3198
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3199
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3200
                    } else {
3201
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3202
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3203
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3204
                    }
3205
                }
3206
                gen_movl_TN_reg(rd, cpu_dst);
3207
            } else if (xop == 0x27) { /* sra, V9 srax */
3208
                cpu_src1 = get_src1(insn, cpu_src1);
3209
                if (IS_IMM) {   /* immediate */
3210
                    simm = GET_FIELDs(insn, 20, 31);
3211
                    if (insn & (1 << 12)) {
3212
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3213
                    } else {
3214
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3215
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3216
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3217
                    }
3218
                } else {                /* register */
3219
                    rs2 = GET_FIELD(insn, 27, 31);
3220
                    gen_movl_reg_TN(rs2, cpu_src2);
3221
                    if (insn & (1 << 12)) {
3222
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3223
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3224
                    } else {
3225
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3226
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3227
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3228
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3229
                    }
3230
                }
3231
                gen_movl_TN_reg(rd, cpu_dst);
3232
#endif
3233
            } else if (xop < 0x36) {
3234
                if (xop < 0x20) {
3235
                    cpu_src1 = get_src1(insn, cpu_src1);
3236
                    cpu_src2 = get_src2(insn, cpu_src2);
3237
                    switch (xop & ~0x10) {
3238
                    case 0x0: /* add */
3239
                        if (IS_IMM) {
3240
                            simm = GET_FIELDs(insn, 19, 31);
3241
                            if (xop & 0x10) {
3242
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3243
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3244
                                dc->cc_op = CC_OP_ADD;
3245
                            } else {
3246
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3247
                            }
3248
                        } else {
3249
                            if (xop & 0x10) {
3250
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3251
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3252
                                dc->cc_op = CC_OP_ADD;
3253
                            } else {
3254
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3255
                            }
3256
                        }
3257
                        break;
3258
                    case 0x1: /* and */
3259
                        if (IS_IMM) {
3260
                            simm = GET_FIELDs(insn, 19, 31);
3261
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3262
                        } else {
3263
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3264
                        }
3265
                        if (xop & 0x10) {
3266
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3267
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3268
                            dc->cc_op = CC_OP_LOGIC;
3269
                        }
3270
                        break;
3271
                    case 0x2: /* or */
3272
                        if (IS_IMM) {
3273
                            simm = GET_FIELDs(insn, 19, 31);
3274
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3275
                        } else {
3276
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3277
                        }
3278
                        if (xop & 0x10) {
3279
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3280
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3281
                            dc->cc_op = CC_OP_LOGIC;
3282
                        }
3283
                        break;
3284
                    case 0x3: /* xor */
3285
                        if (IS_IMM) {
3286
                            simm = GET_FIELDs(insn, 19, 31);
3287
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3288
                        } else {
3289
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3290
                        }
3291
                        if (xop & 0x10) {
3292
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3293
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3294
                            dc->cc_op = CC_OP_LOGIC;
3295
                        }
3296
                        break;
3297
                    case 0x4: /* sub */
3298
                        if (IS_IMM) {
3299
                            simm = GET_FIELDs(insn, 19, 31);
3300
                            if (xop & 0x10) {
3301
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3302
                            } else {
3303
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3304
                            }
3305
                        } else {
3306
                            if (xop & 0x10) {
3307
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3308
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3309
                                dc->cc_op = CC_OP_SUB;
3310
                            } else {
3311
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3312
                            }
3313
                        }
3314
                        break;
3315
                    case 0x5: /* andn */
3316
                        if (IS_IMM) {
3317
                            simm = GET_FIELDs(insn, 19, 31);
3318
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3319
                        } else {
3320
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3321
                        }
3322
                        if (xop & 0x10) {
3323
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3324
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3325
                            dc->cc_op = CC_OP_LOGIC;
3326
                        }
3327
                        break;
3328
                    case 0x6: /* orn */
3329
                        if (IS_IMM) {
3330
                            simm = GET_FIELDs(insn, 19, 31);
3331
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3332
                        } else {
3333
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3334
                        }
3335
                        if (xop & 0x10) {
3336
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3337
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3338
                            dc->cc_op = CC_OP_LOGIC;
3339
                        }
3340
                        break;
3341
                    case 0x7: /* xorn */
3342
                        if (IS_IMM) {
3343
                            simm = GET_FIELDs(insn, 19, 31);
3344
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3345
                        } else {
3346
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3347
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3348
                        }
3349
                        if (xop & 0x10) {
3350
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3351
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3352
                            dc->cc_op = CC_OP_LOGIC;
3353
                        }
3354
                        break;
3355
                    case 0x8: /* addx, V9 addc */
3356
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3357
                                        (xop & 0x10));
3358
                        break;
3359
#ifdef TARGET_SPARC64
3360
                    case 0x9: /* V9 mulx */
3361
                        if (IS_IMM) {
3362
                            simm = GET_FIELDs(insn, 19, 31);
3363
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3364
                        } else {
3365
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3366
                        }
3367
                        break;
3368
#endif
3369
                    case 0xa: /* umul */
3370
                        CHECK_IU_FEATURE(dc, MUL);
3371
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3372
                        if (xop & 0x10) {
3373
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3374
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3375
                            dc->cc_op = CC_OP_LOGIC;
3376
                        }
3377
                        break;
3378
                    case 0xb: /* smul */
3379
                        CHECK_IU_FEATURE(dc, MUL);
3380
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3381
                        if (xop & 0x10) {
3382
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3383
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3384
                            dc->cc_op = CC_OP_LOGIC;
3385
                        }
3386
                        break;
3387
                    case 0xc: /* subx, V9 subc */
3388
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3389
                                        (xop & 0x10));
3390
                        break;
3391
#ifdef TARGET_SPARC64
3392
                    case 0xd: /* V9 udivx */
3393
                        {
3394
                            TCGv r_temp1, r_temp2;
3395
                            r_temp1 = tcg_temp_local_new();
3396
                            r_temp2 = tcg_temp_local_new();
3397
                            tcg_gen_mov_tl(r_temp1, cpu_src1);
3398
                            tcg_gen_mov_tl(r_temp2, cpu_src2);
3399
                            gen_trap_ifdivzero_tl(r_temp2);
3400
                            tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3401
                            tcg_temp_free(r_temp1);
3402
                            tcg_temp_free(r_temp2);
3403
                        }
3404
                        break;
3405
#endif
3406
                    case 0xe: /* udiv */
3407
                        CHECK_IU_FEATURE(dc, DIV);
3408
                        if (xop & 0x10) {
3409
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3410
                                               cpu_src2);
3411
                            dc->cc_op = CC_OP_DIV;
3412
                        } else {
3413
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3414
                                            cpu_src2);
3415
                        }
3416
                        break;
3417
                    case 0xf: /* sdiv */
3418
                        CHECK_IU_FEATURE(dc, DIV);
3419
                        if (xop & 0x10) {
3420
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3421
                                               cpu_src2);
3422
                            dc->cc_op = CC_OP_DIV;
3423
                        } else {
3424
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3425
                                            cpu_src2);
3426
                        }
3427
                        break;
3428
                    default:
3429
                        goto illegal_insn;
3430
                    }
3431
                    gen_movl_TN_reg(rd, cpu_dst);
3432
                } else {
3433
                    cpu_src1 = get_src1(insn, cpu_src1);
3434
                    cpu_src2 = get_src2(insn, cpu_src2);
3435
                    switch (xop) {
3436
                    case 0x20: /* taddcc */
3437
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3438
                        gen_movl_TN_reg(rd, cpu_dst);
3439
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3440
                        dc->cc_op = CC_OP_TADD;
3441
                        break;
3442
                    case 0x21: /* tsubcc */
3443
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3444
                        gen_movl_TN_reg(rd, cpu_dst);
3445
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3446
                        dc->cc_op = CC_OP_TSUB;
3447
                        break;
3448
                    case 0x22: /* taddcctv */
3449
                        save_state(dc, cpu_cond);
3450
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3451
                        gen_movl_TN_reg(rd, cpu_dst);
3452
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3453
                        dc->cc_op = CC_OP_TADDTV;
3454
                        break;
3455
                    case 0x23: /* tsubcctv */
3456
                        save_state(dc, cpu_cond);
3457
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3458
                        gen_movl_TN_reg(rd, cpu_dst);
3459
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3460
                        dc->cc_op = CC_OP_TSUBTV;
3461
                        break;
3462
                    case 0x24: /* mulscc */
3463
                        gen_helper_compute_psr(cpu_env);
3464
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3465
                        gen_movl_TN_reg(rd, cpu_dst);
3466
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3467
                        dc->cc_op = CC_OP_ADD;
3468
                        break;
3469
#ifndef TARGET_SPARC64
3470
                    case 0x25:  /* sll */
3471
                        if (IS_IMM) { /* immediate */
3472
                            simm = GET_FIELDs(insn, 20, 31);
3473
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3474
                        } else { /* register */
3475
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3476
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3477
                        }
3478
                        gen_movl_TN_reg(rd, cpu_dst);
3479
                        break;
3480
                    case 0x26:  /* srl */
3481
                        if (IS_IMM) { /* immediate */
3482
                            simm = GET_FIELDs(insn, 20, 31);
3483
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3484
                        } else { /* register */
3485
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3486
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3487
                        }
3488
                        gen_movl_TN_reg(rd, cpu_dst);
3489
                        break;
3490
                    case 0x27:  /* sra */
3491
                        if (IS_IMM) { /* immediate */
3492
                            simm = GET_FIELDs(insn, 20, 31);
3493
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3494
                        } else { /* register */
3495
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3496
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3497
                        }
3498
                        gen_movl_TN_reg(rd, cpu_dst);
3499
                        break;
3500
#endif
3501
                    case 0x30:
3502
                        {
3503
                            switch(rd) {
3504
                            case 0: /* wry */
3505
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3506
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3507
                                break;
3508
#ifndef TARGET_SPARC64
3509
                            case 0x01 ... 0x0f: /* undefined in the
3510
                                                   SPARCv8 manual, nop
3511
                                                   on the microSPARC
3512
                                                   II */
3513
                            case 0x10 ... 0x1f: /* implementation-dependent
3514
                                                   in the SPARCv8
3515
                                                   manual, nop on the
3516
                                                   microSPARC II */
3517
                                break;
3518
#else
3519
                            case 0x2: /* V9 wrccr */
3520
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3521
                                gen_helper_wrccr(cpu_env, cpu_dst);
3522
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3523
                                dc->cc_op = CC_OP_FLAGS;
3524
                                break;
3525
                            case 0x3: /* V9 wrasi */
3526
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3527
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3528
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3529
                                break;
3530
                            case 0x6: /* V9 wrfprs */
3531
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3532
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3533
                                save_state(dc, cpu_cond);
3534
                                gen_op_next_insn();
3535
                                tcg_gen_exit_tb(0);
3536
                                dc->is_br = 1;
3537
                                break;
3538
                            case 0xf: /* V9 sir, nop if user */
3539
#if !defined(CONFIG_USER_ONLY)
3540
                                if (supervisor(dc)) {
3541
                                    ; // XXX
3542
                                }
3543
#endif
3544
                                break;
3545
                            case 0x13: /* Graphics Status */
3546
                                if (gen_trap_ifnofpu(dc, cpu_cond))
3547
                                    goto jmp_insn;
3548
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3549
                                break;
3550
                            case 0x14: /* Softint set */
3551
                                if (!supervisor(dc))
3552
                                    goto illegal_insn;
3553
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3554
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
3555
                                break;
3556
                            case 0x15: /* Softint clear */
3557
                                if (!supervisor(dc))
3558
                                    goto illegal_insn;
3559
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3560
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
3561
                                break;
3562
                            case 0x16: /* Softint write */
3563
                                if (!supervisor(dc))
3564
                                    goto illegal_insn;
3565
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3566
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
3567
                                break;
3568
                            case 0x17: /* Tick compare */
3569
#if !defined(CONFIG_USER_ONLY)
3570
                                if (!supervisor(dc))
3571
                                    goto illegal_insn;
3572
#endif
3573
                                {
3574
                                    TCGv_ptr r_tickptr;
3575

    
3576
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3577
                                                   cpu_src2);
3578
                                    r_tickptr = tcg_temp_new_ptr();
3579
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3580
                                                   offsetof(CPUState, tick));
3581
                                    gen_helper_tick_set_limit(r_tickptr,
3582
                                                              cpu_tick_cmpr);
3583
                                    tcg_temp_free_ptr(r_tickptr);
3584
                                }
3585
                                break;
3586
                            case 0x18: /* System tick */
3587
#if !defined(CONFIG_USER_ONLY)
3588
                                if (!supervisor(dc))
3589
                                    goto illegal_insn;
3590
#endif
3591
                                {
3592
                                    TCGv_ptr r_tickptr;
3593

    
3594
                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
3595
                                                   cpu_src2);
3596
                                    r_tickptr = tcg_temp_new_ptr();
3597
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3598
                                                   offsetof(CPUState, stick));
3599
                                    gen_helper_tick_set_count(r_tickptr,
3600
                                                              cpu_dst);
3601
                                    tcg_temp_free_ptr(r_tickptr);
3602
                                }
3603
                                break;
3604
                            case 0x19: /* System tick compare */
3605
#if !defined(CONFIG_USER_ONLY)
3606
                                if (!supervisor(dc))
3607
                                    goto illegal_insn;
3608
#endif
3609
                                {
3610
                                    TCGv_ptr r_tickptr;
3611

    
3612
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3613
                                                   cpu_src2);
3614
                                    r_tickptr = tcg_temp_new_ptr();
3615
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3616
                                                   offsetof(CPUState, stick));
3617
                                    gen_helper_tick_set_limit(r_tickptr,
3618
                                                              cpu_stick_cmpr);
3619
                                    tcg_temp_free_ptr(r_tickptr);
3620
                                }
3621
                                break;
3622

    
3623
                            case 0x10: /* Performance Control */
3624
                            case 0x11: /* Performance Instrumentation
3625
                                          Counter */
3626
                            case 0x12: /* Dispatch Control */
3627
#endif
3628
                            default:
3629
                                goto illegal_insn;
3630
                            }
3631
                        }
3632
                        break;
3633
#if !defined(CONFIG_USER_ONLY)
3634
                    case 0x31: /* wrpsr, V9 saved, restored */
3635
                        {
3636
                            if (!supervisor(dc))
3637
                                goto priv_insn;
3638
#ifdef TARGET_SPARC64
3639
                            switch (rd) {
3640
                            case 0:
3641
                                gen_helper_saved(cpu_env);
3642
                                break;
3643
                            case 1:
3644
                                gen_helper_restored(cpu_env);
3645
                                break;
3646
                            case 2: /* UA2005 allclean */
3647
                            case 3: /* UA2005 otherw */
3648
                            case 4: /* UA2005 normalw */
3649
                            case 5: /* UA2005 invalw */
3650
                                // XXX
3651
                            default:
3652
                                goto illegal_insn;
3653
                            }
3654
#else
3655
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3656
                            gen_helper_wrpsr(cpu_env, cpu_dst);
3657
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3658
                            dc->cc_op = CC_OP_FLAGS;
3659
                            save_state(dc, cpu_cond);
3660
                            gen_op_next_insn();
3661
                            tcg_gen_exit_tb(0);
3662
                            dc->is_br = 1;
3663
#endif
3664
                        }
3665
                        break;
3666
                    case 0x32: /* wrwim, V9 wrpr */
3667
                        {
3668
                            if (!supervisor(dc))
3669
                                goto priv_insn;
3670
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3671
#ifdef TARGET_SPARC64
3672
                            switch (rd) {
3673
                            case 0: // tpc
3674
                                {
3675
                                    TCGv_ptr r_tsptr;
3676

    
3677
                                    r_tsptr = tcg_temp_new_ptr();
3678
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3679
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3680
                                                  offsetof(trap_state, tpc));
3681
                                    tcg_temp_free_ptr(r_tsptr);
3682
                                }
3683
                                break;
3684
                            case 1: // tnpc
3685
                                {
3686
                                    TCGv_ptr r_tsptr;
3687

    
3688
                                    r_tsptr = tcg_temp_new_ptr();
3689
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3690
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3691
                                                  offsetof(trap_state, tnpc));
3692
                                    tcg_temp_free_ptr(r_tsptr);
3693
                                }
3694
                                break;
3695
                            case 2: // tstate
3696
                                {
3697
                                    TCGv_ptr r_tsptr;
3698

    
3699
                                    r_tsptr = tcg_temp_new_ptr();
3700
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3701
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3702
                                                  offsetof(trap_state,
3703
                                                           tstate));
3704
                                    tcg_temp_free_ptr(r_tsptr);
3705
                                }
3706
                                break;
3707
                            case 3: // tt
3708
                                {
3709
                                    TCGv_ptr r_tsptr;
3710

    
3711
                                    r_tsptr = tcg_temp_new_ptr();
3712
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3713
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3714
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3715
                                                   offsetof(trap_state, tt));
3716
                                    tcg_temp_free_ptr(r_tsptr);
3717
                                }
3718
                                break;
3719
                            case 4: // tick
3720
                                {
3721
                                    TCGv_ptr r_tickptr;
3722

    
3723
                                    r_tickptr = tcg_temp_new_ptr();
3724
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3725
                                                   offsetof(CPUState, tick));
3726
                                    gen_helper_tick_set_count(r_tickptr,
3727
                                                              cpu_tmp0);
3728
                                    tcg_temp_free_ptr(r_tickptr);
3729
                                }
3730
                                break;
3731
                            case 5: // tba
3732
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3733
                                break;
3734
                            case 6: // pstate
3735
                                {
3736
                                    TCGv r_tmp = tcg_temp_local_new();
3737

    
3738
                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3739
                                    save_state(dc, cpu_cond);
3740
                                    gen_helper_wrpstate(cpu_env, r_tmp);
3741
                                    tcg_temp_free(r_tmp);
3742
                                    dc->npc = DYNAMIC_PC;
3743
                                }
3744
                                break;
3745
                            case 7: // tl
3746
                                {
3747
                                    TCGv r_tmp = tcg_temp_local_new();
3748

    
3749
                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3750
                                    save_state(dc, cpu_cond);
3751
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3752
                                    tcg_temp_free(r_tmp);
3753
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
3754
                                                   offsetof(CPUSPARCState, tl));
3755
                                    dc->npc = DYNAMIC_PC;
3756
                                }
3757
                                break;
3758
                            case 8: // pil
3759
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
3760
                                break;
3761
                            case 9: // cwp
3762
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
3763
                                break;
3764
                            case 10: // cansave
3765
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3766
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3767
                                               offsetof(CPUSPARCState,
3768
                                                        cansave));
3769
                                break;
3770
                            case 11: // canrestore
3771
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3772
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3773
                                               offsetof(CPUSPARCState,
3774
                                                        canrestore));
3775
                                break;
3776
                            case 12: // cleanwin
3777
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3778
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3779
                                               offsetof(CPUSPARCState,
3780
                                                        cleanwin));
3781
                                break;
3782
                            case 13: // otherwin
3783
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3784
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3785
                                               offsetof(CPUSPARCState,
3786
                                                        otherwin));
3787
                                break;
3788
                            case 14: // wstate
3789
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3790
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3791
                                               offsetof(CPUSPARCState,
3792
                                                        wstate));
3793
                                break;
3794
                            case 16: // UA2005 gl
3795
                                CHECK_IU_FEATURE(dc, GL);
3796
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3797
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3798
                                               offsetof(CPUSPARCState, gl));
3799
                                break;
3800
                            case 26: // UA2005 strand status
3801
                                CHECK_IU_FEATURE(dc, HYPV);
3802
                                if (!hypervisor(dc))
3803
                                    goto priv_insn;
3804
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3805
                                break;
3806
                            default:
3807
                                goto illegal_insn;
3808
                            }
3809
#else
3810
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3811
                            if (dc->def->nwindows != 32)
3812
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3813
                                                (1 << dc->def->nwindows) - 1);
3814
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3815
#endif
3816
                        }
3817
                        break;
3818
                    case 0x33: /* wrtbr, UA2005 wrhpr */
3819
                        {
3820
#ifndef TARGET_SPARC64
3821
                            if (!supervisor(dc))
3822
                                goto priv_insn;
3823
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3824
#else
3825
                            CHECK_IU_FEATURE(dc, HYPV);
3826
                            if (!hypervisor(dc))
3827
                                goto priv_insn;
3828
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3829
                            switch (rd) {
3830
                            case 0: // hpstate
3831
                                // XXX gen_op_wrhpstate();
3832
                                save_state(dc, cpu_cond);
3833
                                gen_op_next_insn();
3834
                                tcg_gen_exit_tb(0);
3835
                                dc->is_br = 1;
3836
                                break;
3837
                            case 1: // htstate
3838
                                // XXX gen_op_wrhtstate();
3839
                                break;
3840
                            case 3: // hintp
3841
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3842
                                break;
3843
                            case 5: // htba
3844
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3845
                                break;
3846
                            case 31: // hstick_cmpr
3847
                                {
3848
                                    TCGv_ptr r_tickptr;
3849

    
3850
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3851
                                    r_tickptr = tcg_temp_new_ptr();
3852
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3853
                                                   offsetof(CPUState, hstick));
3854
                                    gen_helper_tick_set_limit(r_tickptr,
3855
                                                              cpu_hstick_cmpr);
3856
                                    tcg_temp_free_ptr(r_tickptr);
3857
                                }
3858
                                break;
3859
                            case 6: // hver readonly
3860
                            default:
3861
                                goto illegal_insn;
3862
                            }
3863
#endif
3864
                        }
3865
                        break;
3866
#endif
3867
#ifdef TARGET_SPARC64
3868
                    case 0x2c: /* V9 movcc */
3869
                        {
3870
                            int cc = GET_FIELD_SP(insn, 11, 12);
3871
                            int cond = GET_FIELD_SP(insn, 14, 17);
3872
                            TCGv r_cond;
3873
                            int l1;
3874

    
3875
                            r_cond = tcg_temp_new();
3876
                            if (insn & (1 << 18)) {
3877
                                if (cc == 0)
3878
                                    gen_cond(r_cond, 0, cond, dc);
3879
                                else if (cc == 2)
3880
                                    gen_cond(r_cond, 1, cond, dc);
3881
                                else
3882
                                    goto illegal_insn;
3883
                            } else {
3884
                                gen_fcond(r_cond, cc, cond);
3885
                            }
3886

    
3887
                            l1 = gen_new_label();
3888

    
3889
                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3890
                            if (IS_IMM) {       /* immediate */
3891
                                TCGv r_const;
3892

    
3893
                                simm = GET_FIELD_SPs(insn, 0, 10);
3894
                                r_const = tcg_const_tl(simm);
3895
                                gen_movl_TN_reg(rd, r_const);
3896
                                tcg_temp_free(r_const);
3897
                            } else {
3898
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3899
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3900
                                gen_movl_TN_reg(rd, cpu_tmp0);
3901
                            }
3902
                            gen_set_label(l1);
3903
                            tcg_temp_free(r_cond);
3904
                            break;
3905
                        }
3906
                    case 0x2d: /* V9 sdivx */
3907
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3908
                        gen_movl_TN_reg(rd, cpu_dst);
3909
                        break;
3910
                    case 0x2e: /* V9 popc */
3911
                        {
3912
                            cpu_src2 = get_src2(insn, cpu_src2);
3913
                            gen_helper_popc(cpu_dst, cpu_src2);
3914
                            gen_movl_TN_reg(rd, cpu_dst);
3915
                        }
3916
                    case 0x2f: /* V9 movr */
3917
                        {
3918
                            int cond = GET_FIELD_SP(insn, 10, 12);
3919
                            int l1;
3920

    
3921
                            cpu_src1 = get_src1(insn, cpu_src1);
3922

    
3923
                            l1 = gen_new_label();
3924

    
3925
                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3926
                                              cpu_src1, 0, l1);
3927
                            if (IS_IMM) {       /* immediate */
3928
                                TCGv r_const;
3929

    
3930
                                simm = GET_FIELD_SPs(insn, 0, 9);
3931
                                r_const = tcg_const_tl(simm);
3932
                                gen_movl_TN_reg(rd, r_const);
3933
                                tcg_temp_free(r_const);
3934
                            } else {
3935
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3936
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3937
                                gen_movl_TN_reg(rd, cpu_tmp0);
3938
                            }
3939
                            gen_set_label(l1);
3940
                            break;
3941
                        }
3942
#endif
3943
                    default:
3944
                        goto illegal_insn;
3945
                    }
3946
                }
3947
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3948
#ifdef TARGET_SPARC64
3949
                int opf = GET_FIELD_SP(insn, 5, 13);
3950
                rs1 = GET_FIELD(insn, 13, 17);
3951
                rs2 = GET_FIELD(insn, 27, 31);
3952
                if (gen_trap_ifnofpu(dc, cpu_cond))
3953
                    goto jmp_insn;
3954

    
3955
                switch (opf) {
3956
                case 0x000: /* VIS I edge8cc */
3957
                case 0x001: /* VIS II edge8n */
3958
                case 0x002: /* VIS I edge8lcc */
3959
                case 0x003: /* VIS II edge8ln */
3960
                case 0x004: /* VIS I edge16cc */
3961
                case 0x005: /* VIS II edge16n */
3962
                case 0x006: /* VIS I edge16lcc */
3963
                case 0x007: /* VIS II edge16ln */
3964
                case 0x008: /* VIS I edge32cc */
3965
                case 0x009: /* VIS II edge32n */
3966
                case 0x00a: /* VIS I edge32lcc */
3967
                case 0x00b: /* VIS II edge32ln */
3968
                    // XXX
3969
                    goto illegal_insn;
3970
                case 0x010: /* VIS I array8 */
3971
                    CHECK_FPU_FEATURE(dc, VIS1);
3972
                    cpu_src1 = get_src1(insn, cpu_src1);
3973
                    gen_movl_reg_TN(rs2, cpu_src2);
3974
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3975
                    gen_movl_TN_reg(rd, cpu_dst);
3976
                    break;
3977
                case 0x012: /* VIS I array16 */
3978
                    CHECK_FPU_FEATURE(dc, VIS1);
3979
                    cpu_src1 = get_src1(insn, cpu_src1);
3980
                    gen_movl_reg_TN(rs2, cpu_src2);
3981
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3982
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3983
                    gen_movl_TN_reg(rd, cpu_dst);
3984
                    break;
3985
                case 0x014: /* VIS I array32 */
3986
                    CHECK_FPU_FEATURE(dc, VIS1);
3987
                    cpu_src1 = get_src1(insn, cpu_src1);
3988
                    gen_movl_reg_TN(rs2, cpu_src2);
3989
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3990
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3991
                    gen_movl_TN_reg(rd, cpu_dst);
3992
                    break;
3993
                case 0x018: /* VIS I alignaddr */
3994
                    CHECK_FPU_FEATURE(dc, VIS1);
3995
                    cpu_src1 = get_src1(insn, cpu_src1);
3996
                    gen_movl_reg_TN(rs2, cpu_src2);
3997
                    gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3998
                    gen_movl_TN_reg(rd, cpu_dst);
3999
                    break;
4000
                case 0x019: /* VIS II bmask */
4001
                case 0x01a: /* VIS I alignaddrl */
4002
                    // XXX
4003
                    goto illegal_insn;
4004
                case 0x020: /* VIS I fcmple16 */
4005
                    CHECK_FPU_FEATURE(dc, VIS1);
4006
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4007
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4008
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4009
                    gen_movl_TN_reg(rd, cpu_dst);
4010
                    break;
4011
                case 0x022: /* VIS I fcmpne16 */
4012
                    CHECK_FPU_FEATURE(dc, VIS1);
4013
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4014
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4015
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4016
                    gen_movl_TN_reg(rd, cpu_dst);
4017
                    break;
4018
                case 0x024: /* VIS I fcmple32 */
4019
                    CHECK_FPU_FEATURE(dc, VIS1);
4020
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4021
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4022
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4023
                    gen_movl_TN_reg(rd, cpu_dst);
4024
                    break;
4025
                case 0x026: /* VIS I fcmpne32 */
4026
                    CHECK_FPU_FEATURE(dc, VIS1);
4027
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4028
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4029
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4030
                    gen_movl_TN_reg(rd, cpu_dst);
4031
                    break;
4032
                case 0x028: /* VIS I fcmpgt16 */
4033
                    CHECK_FPU_FEATURE(dc, VIS1);
4034
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4035
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4036
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4037
                    gen_movl_TN_reg(rd, cpu_dst);
4038
                    break;
4039
                case 0x02a: /* VIS I fcmpeq16 */
4040
                    CHECK_FPU_FEATURE(dc, VIS1);
4041
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4042
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4043
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4044
                    gen_movl_TN_reg(rd, cpu_dst);
4045
                    break;
4046
                case 0x02c: /* VIS I fcmpgt32 */
4047
                    CHECK_FPU_FEATURE(dc, VIS1);
4048
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4049
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4050
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4051
                    gen_movl_TN_reg(rd, cpu_dst);
4052
                    break;
4053
                case 0x02e: /* VIS I fcmpeq32 */
4054
                    CHECK_FPU_FEATURE(dc, VIS1);
4055
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4056
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4057
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4058
                    gen_movl_TN_reg(rd, cpu_dst);
4059
                    break;
4060
                case 0x031: /* VIS I fmul8x16 */
4061
                    CHECK_FPU_FEATURE(dc, VIS1);
4062
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4063
                    break;
4064
                case 0x033: /* VIS I fmul8x16au */
4065
                    CHECK_FPU_FEATURE(dc, VIS1);
4066
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4067
                    break;
4068
                case 0x035: /* VIS I fmul8x16al */
4069
                    CHECK_FPU_FEATURE(dc, VIS1);
4070
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4071
                    break;
4072
                case 0x036: /* VIS I fmul8sux16 */
4073
                    CHECK_FPU_FEATURE(dc, VIS1);
4074
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4075
                    break;
4076
                case 0x037: /* VIS I fmul8ulx16 */
4077
                    CHECK_FPU_FEATURE(dc, VIS1);
4078
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4079
                    break;
4080
                case 0x038: /* VIS I fmuld8sux16 */
4081
                    CHECK_FPU_FEATURE(dc, VIS1);
4082
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4083
                    break;
4084
                case 0x039: /* VIS I fmuld8ulx16 */
4085
                    CHECK_FPU_FEATURE(dc, VIS1);
4086
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4087
                    break;
4088
                case 0x03a: /* VIS I fpack32 */
4089
                    CHECK_FPU_FEATURE(dc, VIS1);
4090
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4091
                    break;
4092
                case 0x03b: /* VIS I fpack16 */
4093
                    CHECK_FPU_FEATURE(dc, VIS1);
4094
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4095
                    cpu_dst_32 = gen_dest_fpr_F();
4096
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4097
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4098
                    break;
4099
                case 0x03d: /* VIS I fpackfix */
4100
                    CHECK_FPU_FEATURE(dc, VIS1);
4101
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4102
                    cpu_dst_32 = gen_dest_fpr_F();
4103
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4104
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4105
                    break;
4106
                case 0x03e: /* VIS I pdist */
4107
                    CHECK_FPU_FEATURE(dc, VIS1);
4108
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4109
                    break;
4110
                case 0x048: /* VIS I faligndata */
4111
                    CHECK_FPU_FEATURE(dc, VIS1);
4112
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4113
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4114
                    cpu_dst_64 = gen_dest_fpr_D();
4115
                    gen_helper_faligndata(cpu_dst_64, cpu_env,
4116
                                          cpu_src1_64, cpu_src2_64);
4117
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4118
                    break;
4119
                case 0x04b: /* VIS I fpmerge */
4120
                    CHECK_FPU_FEATURE(dc, VIS1);
4121
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4122
                    break;
4123
                case 0x04c: /* VIS II bshuffle */
4124
                    // XXX
4125
                    goto illegal_insn;
4126
                case 0x04d: /* VIS I fexpand */
4127
                    CHECK_FPU_FEATURE(dc, VIS1);
4128
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4129
                    break;
4130
                case 0x050: /* VIS I fpadd16 */
4131
                    CHECK_FPU_FEATURE(dc, VIS1);
4132
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4133
                    break;
4134
                case 0x051: /* VIS I fpadd16s */
4135
                    CHECK_FPU_FEATURE(dc, VIS1);
4136
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4137
                    break;
4138
                case 0x052: /* VIS I fpadd32 */
4139
                    CHECK_FPU_FEATURE(dc, VIS1);
4140
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4141
                    break;
4142
                case 0x053: /* VIS I fpadd32s */
4143
                    CHECK_FPU_FEATURE(dc, VIS1);
4144
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4145
                    break;
4146
                case 0x054: /* VIS I fpsub16 */
4147
                    CHECK_FPU_FEATURE(dc, VIS1);
4148
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4149
                    break;
4150
                case 0x055: /* VIS I fpsub16s */
4151
                    CHECK_FPU_FEATURE(dc, VIS1);
4152
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4153
                    break;
4154
                case 0x056: /* VIS I fpsub32 */
4155
                    CHECK_FPU_FEATURE(dc, VIS1);
4156
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4157
                    break;
4158
                case 0x057: /* VIS I fpsub32s */
4159
                    CHECK_FPU_FEATURE(dc, VIS1);
4160
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4161
                    break;
4162
                case 0x060: /* VIS I fzero */
4163
                    CHECK_FPU_FEATURE(dc, VIS1);
4164
                    cpu_dst_64 = gen_dest_fpr_D();
4165
                    tcg_gen_movi_i64(cpu_dst_64, 0);
4166
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4167
                    break;
4168
                case 0x061: /* VIS I fzeros */
4169
                    CHECK_FPU_FEATURE(dc, VIS1);
4170
                    cpu_dst_32 = gen_dest_fpr_F();
4171
                    tcg_gen_movi_i32(cpu_dst_32, 0);
4172
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4173
                    break;
4174
                case 0x062: /* VIS I fnor */
4175
                    CHECK_FPU_FEATURE(dc, VIS1);
4176
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4177
                    break;
4178
                case 0x063: /* VIS I fnors */
4179
                    CHECK_FPU_FEATURE(dc, VIS1);
4180
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4181
                    break;
4182
                case 0x064: /* VIS I fandnot2 */
4183
                    CHECK_FPU_FEATURE(dc, VIS1);
4184
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4185
                    break;
4186
                case 0x065: /* VIS I fandnot2s */
4187
                    CHECK_FPU_FEATURE(dc, VIS1);
4188
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4189
                    break;
4190
                case 0x066: /* VIS I fnot2 */
4191
                    CHECK_FPU_FEATURE(dc, VIS1);
4192
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4193
                    break;
4194
                case 0x067: /* VIS I fnot2s */
4195
                    CHECK_FPU_FEATURE(dc, VIS1);
4196
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4197
                    break;
4198
                case 0x068: /* VIS I fandnot1 */
4199
                    CHECK_FPU_FEATURE(dc, VIS1);
4200
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4201
                    break;
4202
                case 0x069: /* VIS I fandnot1s */
4203
                    CHECK_FPU_FEATURE(dc, VIS1);
4204
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4205
                    break;
4206
                case 0x06a: /* VIS I fnot1 */
4207
                    CHECK_FPU_FEATURE(dc, VIS1);
4208
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4209
                    break;
4210
                case 0x06b: /* VIS I fnot1s */
4211
                    CHECK_FPU_FEATURE(dc, VIS1);
4212
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4213
                    break;
4214
                case 0x06c: /* VIS I fxor */
4215
                    CHECK_FPU_FEATURE(dc, VIS1);
4216
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4217
                    break;
4218
                case 0x06d: /* VIS I fxors */
4219
                    CHECK_FPU_FEATURE(dc, VIS1);
4220
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4221
                    break;
4222
                case 0x06e: /* VIS I fnand */
4223
                    CHECK_FPU_FEATURE(dc, VIS1);
4224
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4225
                    break;
4226
                case 0x06f: /* VIS I fnands */
4227
                    CHECK_FPU_FEATURE(dc, VIS1);
4228
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4229
                    break;
4230
                case 0x070: /* VIS I fand */
4231
                    CHECK_FPU_FEATURE(dc, VIS1);
4232
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4233
                    break;
4234
                case 0x071: /* VIS I fands */
4235
                    CHECK_FPU_FEATURE(dc, VIS1);
4236
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4237
                    break;
4238
                case 0x072: /* VIS I fxnor */
4239
                    CHECK_FPU_FEATURE(dc, VIS1);
4240
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4241
                    break;
4242
                case 0x073: /* VIS I fxnors */
4243
                    CHECK_FPU_FEATURE(dc, VIS1);
4244
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4245
                    break;
4246
                case 0x074: /* VIS I fsrc1 */
4247
                    CHECK_FPU_FEATURE(dc, VIS1);
4248
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4249
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
4250
                    break;
4251
                case 0x075: /* VIS I fsrc1s */
4252
                    CHECK_FPU_FEATURE(dc, VIS1);
4253
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4254
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
4255
                    break;
4256
                case 0x076: /* VIS I fornot2 */
4257
                    CHECK_FPU_FEATURE(dc, VIS1);
4258
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4259
                    break;
4260
                case 0x077: /* VIS I fornot2s */
4261
                    CHECK_FPU_FEATURE(dc, VIS1);
4262
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4263
                    break;
4264
                case 0x078: /* VIS I fsrc2 */
4265
                    CHECK_FPU_FEATURE(dc, VIS1);
4266
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4267
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
4268
                    break;
4269
                case 0x079: /* VIS I fsrc2s */
4270
                    CHECK_FPU_FEATURE(dc, VIS1);
4271
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4272
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
4273
                    break;
4274
                case 0x07a: /* VIS I fornot1 */
4275
                    CHECK_FPU_FEATURE(dc, VIS1);
4276
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4277
                    break;
4278
                case 0x07b: /* VIS I fornot1s */
4279
                    CHECK_FPU_FEATURE(dc, VIS1);
4280
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4281
                    break;
4282
                case 0x07c: /* VIS I for */
4283
                    CHECK_FPU_FEATURE(dc, VIS1);
4284
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4285
                    break;
4286
                case 0x07d: /* VIS I fors */
4287
                    CHECK_FPU_FEATURE(dc, VIS1);
4288
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4289
                    break;
4290
                case 0x07e: /* VIS I fone */
4291
                    CHECK_FPU_FEATURE(dc, VIS1);
4292
                    cpu_dst_64 = gen_dest_fpr_D();
4293
                    tcg_gen_movi_i64(cpu_dst_64, -1);
4294
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4295
                    break;
4296
                case 0x07f: /* VIS I fones */
4297
                    CHECK_FPU_FEATURE(dc, VIS1);
4298
                    cpu_dst_32 = gen_dest_fpr_F();
4299
                    tcg_gen_movi_i32(cpu_dst_32, -1);
4300
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4301
                    break;
4302
                case 0x080: /* VIS I shutdown */
4303
                case 0x081: /* VIS II siam */
4304
                    // XXX
4305
                    goto illegal_insn;
4306
                default:
4307
                    goto illegal_insn;
4308
                }
4309
#else
4310
                goto ncp_insn;
4311
#endif
4312
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4313
#ifdef TARGET_SPARC64
4314
                goto illegal_insn;
4315
#else
4316
                goto ncp_insn;
4317
#endif
4318
#ifdef TARGET_SPARC64
4319
            } else if (xop == 0x39) { /* V9 return */
4320
                TCGv_i32 r_const;
4321

    
4322
                save_state(dc, cpu_cond);
4323
                cpu_src1 = get_src1(insn, cpu_src1);
4324
                if (IS_IMM) {   /* immediate */
4325
                    simm = GET_FIELDs(insn, 19, 31);
4326
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4327
                } else {                /* register */
4328
                    rs2 = GET_FIELD(insn, 27, 31);
4329
                    if (rs2) {
4330
                        gen_movl_reg_TN(rs2, cpu_src2);
4331
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4332
                    } else
4333
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4334
                }
4335
                gen_helper_restore(cpu_env);
4336
                gen_mov_pc_npc(dc, cpu_cond);
4337
                r_const = tcg_const_i32(3);
4338
                gen_helper_check_align(cpu_dst, r_const);
4339
                tcg_temp_free_i32(r_const);
4340
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
4341
                dc->npc = DYNAMIC_PC;
4342
                goto jmp_insn;
4343
#endif
4344
            } else {
4345
                cpu_src1 = get_src1(insn, cpu_src1);
4346
                if (IS_IMM) {   /* immediate */
4347
                    simm = GET_FIELDs(insn, 19, 31);
4348
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4349
                } else {                /* register */
4350
                    rs2 = GET_FIELD(insn, 27, 31);
4351
                    if (rs2) {
4352
                        gen_movl_reg_TN(rs2, cpu_src2);
4353
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4354
                    } else
4355
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4356
                }
4357
                switch (xop) {
4358
                case 0x38:      /* jmpl */
4359
                    {
4360
                        TCGv r_pc;
4361
                        TCGv_i32 r_const;
4362

    
4363
                        r_pc = tcg_const_tl(dc->pc);
4364
                        gen_movl_TN_reg(rd, r_pc);
4365
                        tcg_temp_free(r_pc);
4366
                        gen_mov_pc_npc(dc, cpu_cond);
4367
                        r_const = tcg_const_i32(3);
4368
                        gen_helper_check_align(cpu_dst, r_const);
4369
                        tcg_temp_free_i32(r_const);
4370
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
4371
                        dc->npc = DYNAMIC_PC;
4372
                    }
4373
                    goto jmp_insn;
4374
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4375
                case 0x39:      /* rett, V9 return */
4376
                    {
4377
                        TCGv_i32 r_const;
4378

    
4379
                        if (!supervisor(dc))
4380
                            goto priv_insn;
4381
                        gen_mov_pc_npc(dc, cpu_cond);
4382
                        r_const = tcg_const_i32(3);
4383
                        gen_helper_check_align(cpu_dst, r_const);
4384
                        tcg_temp_free_i32(r_const);
4385
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
4386
                        dc->npc = DYNAMIC_PC;
4387
                        gen_helper_rett(cpu_env);
4388
                    }
4389
                    goto jmp_insn;
4390
#endif
4391
                case 0x3b: /* flush */
4392
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4393
                        goto unimp_flush;
4394
                    /* nop */
4395
                    break;
4396
                case 0x3c:      /* save */
4397
                    save_state(dc, cpu_cond);
4398
                    gen_helper_save(cpu_env);
4399
                    gen_movl_TN_reg(rd, cpu_dst);
4400
                    break;
4401
                case 0x3d:      /* restore */
4402
                    save_state(dc, cpu_cond);
4403
                    gen_helper_restore(cpu_env);
4404
                    gen_movl_TN_reg(rd, cpu_dst);
4405
                    break;
4406
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4407
                case 0x3e:      /* V9 done/retry */
4408
                    {
4409
                        switch (rd) {
4410
                        case 0:
4411
                            if (!supervisor(dc))
4412
                                goto priv_insn;
4413
                            dc->npc = DYNAMIC_PC;
4414
                            dc->pc = DYNAMIC_PC;
4415
                            gen_helper_done(cpu_env);
4416
                            goto jmp_insn;
4417
                        case 1:
4418
                            if (!supervisor(dc))
4419
                                goto priv_insn;
4420
                            dc->npc = DYNAMIC_PC;
4421
                            dc->pc = DYNAMIC_PC;
4422
                            gen_helper_retry(cpu_env);
4423
                            goto jmp_insn;
4424
                        default:
4425
                            goto illegal_insn;
4426
                        }
4427
                    }
4428
                    break;
4429
#endif
4430
                default:
4431
                    goto illegal_insn;
4432
                }
4433
            }
4434
            break;
4435
        }
4436
        break;
4437
    case 3:                     /* load/store instructions */
4438
        {
4439
            unsigned int xop = GET_FIELD(insn, 7, 12);
4440

    
4441
            /* flush pending conditional evaluations before exposing
4442
               cpu state */
4443
            if (dc->cc_op != CC_OP_FLAGS) {
4444
                dc->cc_op = CC_OP_FLAGS;
4445
                gen_helper_compute_psr(cpu_env);
4446
            }
4447
            cpu_src1 = get_src1(insn, cpu_src1);
4448
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4449
                rs2 = GET_FIELD(insn, 27, 31);
4450
                gen_movl_reg_TN(rs2, cpu_src2);
4451
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
4452
            } else if (IS_IMM) {     /* immediate */
4453
                simm = GET_FIELDs(insn, 19, 31);
4454
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4455
            } else {            /* register */
4456
                rs2 = GET_FIELD(insn, 27, 31);
4457
                if (rs2 != 0) {
4458
                    gen_movl_reg_TN(rs2, cpu_src2);
4459
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4460
                } else
4461
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
4462
            }
4463
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4464
                (xop > 0x17 && xop <= 0x1d ) ||
4465
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4466
                switch (xop) {
4467
                case 0x0:       /* ld, V9 lduw, load unsigned word */
4468
                    gen_address_mask(dc, cpu_addr);
4469
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4470
                    break;
4471
                case 0x1:       /* ldub, load unsigned byte */
4472
                    gen_address_mask(dc, cpu_addr);
4473
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4474
                    break;
4475
                case 0x2:       /* lduh, load unsigned halfword */
4476
                    gen_address_mask(dc, cpu_addr);
4477
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4478
                    break;
4479
                case 0x3:       /* ldd, load double word */
4480
                    if (rd & 1)
4481
                        goto illegal_insn;
4482
                    else {
4483
                        TCGv_i32 r_const;
4484

    
4485
                        save_state(dc, cpu_cond);
4486
                        r_const = tcg_const_i32(7);
4487
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
4488
                        tcg_temp_free_i32(r_const);
4489
                        gen_address_mask(dc, cpu_addr);
4490
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4491
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4492
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4493
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
4494
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4495
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4496
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4497
                    }
4498
                    break;
4499
                case 0x9:       /* ldsb, load signed byte */
4500
                    gen_address_mask(dc, cpu_addr);
4501
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4502
                    break;
4503
                case 0xa:       /* ldsh, load signed halfword */
4504
                    gen_address_mask(dc, cpu_addr);
4505
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4506
                    break;
4507
                case 0xd:       /* ldstub -- XXX: should be atomically */
4508
                    {
4509
                        TCGv r_const;
4510

    
4511
                        gen_address_mask(dc, cpu_addr);
4512
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4513
                        r_const = tcg_const_tl(0xff);
4514
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4515
                        tcg_temp_free(r_const);
4516
                    }
4517
                    break;
4518
                case 0x0f:      /* swap, swap register with memory. Also
4519
                                   atomically */
4520
                    CHECK_IU_FEATURE(dc, SWAP);
4521
                    gen_movl_reg_TN(rd, cpu_val);
4522
                    gen_address_mask(dc, cpu_addr);
4523
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4524
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4525
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4526
                    break;
4527
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4528
                case 0x10:      /* lda, V9 lduwa, load word alternate */
4529
#ifndef TARGET_SPARC64
4530
                    if (IS_IMM)
4531
                        goto illegal_insn;
4532
                    if (!supervisor(dc))
4533
                        goto priv_insn;
4534
#endif
4535
                    save_state(dc, cpu_cond);
4536
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4537
                    break;
4538
                case 0x11:      /* lduba, load unsigned byte alternate */
4539
#ifndef TARGET_SPARC64
4540
                    if (IS_IMM)
4541
                        goto illegal_insn;
4542
                    if (!supervisor(dc))
4543
                        goto priv_insn;
4544
#endif
4545
                    save_state(dc, cpu_cond);
4546
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4547
                    break;
4548
                case 0x12:      /* lduha, load unsigned halfword alternate */
4549
#ifndef TARGET_SPARC64
4550
                    if (IS_IMM)
4551
                        goto illegal_insn;
4552
                    if (!supervisor(dc))
4553
                        goto priv_insn;
4554
#endif
4555
                    save_state(dc, cpu_cond);
4556
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4557
                    break;
4558
                case 0x13:      /* ldda, load double word alternate */
4559
#ifndef TARGET_SPARC64
4560
                    if (IS_IMM)
4561
                        goto illegal_insn;
4562
                    if (!supervisor(dc))
4563
                        goto priv_insn;
4564
#endif
4565
                    if (rd & 1)
4566
                        goto illegal_insn;
4567
                    save_state(dc, cpu_cond);
4568
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4569
                    goto skip_move;
4570
                case 0x19:      /* ldsba, load signed byte alternate */
4571
#ifndef TARGET_SPARC64
4572
                    if (IS_IMM)
4573
                        goto illegal_insn;
4574
                    if (!supervisor(dc))
4575
                        goto priv_insn;
4576
#endif
4577
                    save_state(dc, cpu_cond);
4578
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4579
                    break;
4580
                case 0x1a:      /* ldsha, load signed halfword alternate */
4581
#ifndef TARGET_SPARC64
4582
                    if (IS_IMM)
4583
                        goto illegal_insn;
4584
                    if (!supervisor(dc))
4585
                        goto priv_insn;
4586
#endif
4587
                    save_state(dc, cpu_cond);
4588
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4589
                    break;
4590
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
4591
#ifndef TARGET_SPARC64
4592
                    if (IS_IMM)
4593
                        goto illegal_insn;
4594
                    if (!supervisor(dc))
4595
                        goto priv_insn;
4596
#endif
4597
                    save_state(dc, cpu_cond);
4598
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
4599
                    break;
4600
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
4601
                                   atomically */
4602
                    CHECK_IU_FEATURE(dc, SWAP);
4603
#ifndef TARGET_SPARC64
4604
                    if (IS_IMM)
4605
                        goto illegal_insn;
4606
                    if (!supervisor(dc))
4607
                        goto priv_insn;
4608
#endif
4609
                    save_state(dc, cpu_cond);
4610
                    gen_movl_reg_TN(rd, cpu_val);
4611
                    gen_swap_asi(cpu_val, cpu_addr, insn);
4612
                    break;
4613

    
4614
#ifndef TARGET_SPARC64
4615
                case 0x30: /* ldc */
4616
                case 0x31: /* ldcsr */
4617
                case 0x33: /* lddc */
4618
                    goto ncp_insn;
4619
#endif
4620
#endif
4621
#ifdef TARGET_SPARC64
4622
                case 0x08: /* V9 ldsw */
4623
                    gen_address_mask(dc, cpu_addr);
4624
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4625
                    break;
4626
                case 0x0b: /* V9 ldx */
4627
                    gen_address_mask(dc, cpu_addr);
4628
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4629
                    break;
4630
                case 0x18: /* V9 ldswa */
4631
                    save_state(dc, cpu_cond);
4632
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4633
                    break;
4634
                case 0x1b: /* V9 ldxa */
4635
                    save_state(dc, cpu_cond);
4636
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4637
                    break;
4638
                case 0x2d: /* V9 prefetch, no effect */
4639
                    goto skip_move;
4640
                case 0x30: /* V9 ldfa */
4641
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4642
                        goto jmp_insn;
4643
                    }
4644
                    save_state(dc, cpu_cond);
4645
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
4646
                    gen_update_fprs_dirty(rd);
4647
                    goto skip_move;
4648
                case 0x33: /* V9 lddfa */
4649
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4650
                        goto jmp_insn;
4651
                    }
4652
                    save_state(dc, cpu_cond);
4653
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4654
                    gen_update_fprs_dirty(DFPREG(rd));
4655
                    goto skip_move;
4656
                case 0x3d: /* V9 prefetcha, no effect */
4657
                    goto skip_move;
4658
                case 0x32: /* V9 ldqfa */
4659
                    CHECK_FPU_FEATURE(dc, FLOAT128);
4660
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4661
                        goto jmp_insn;
4662
                    }
4663
                    save_state(dc, cpu_cond);
4664
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4665
                    gen_update_fprs_dirty(QFPREG(rd));
4666
                    goto skip_move;
4667
#endif
4668
                default:
4669
                    goto illegal_insn;
4670
                }
4671
                gen_movl_TN_reg(rd, cpu_val);
4672
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4673
            skip_move: ;
4674
#endif
4675
            } else if (xop >= 0x20 && xop < 0x24) {
4676
                if (gen_trap_ifnofpu(dc, cpu_cond))
4677
                    goto jmp_insn;
4678
                save_state(dc, cpu_cond);
4679
                switch (xop) {
4680
                case 0x20:      /* ldf, load fpreg */
4681
                    gen_address_mask(dc, cpu_addr);
4682
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4683
                    cpu_dst_32 = gen_dest_fpr_F();
4684
                    tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4685
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4686
                    break;
4687
                case 0x21:      /* ldfsr, V9 ldxfsr */
4688
#ifdef TARGET_SPARC64
4689
                    gen_address_mask(dc, cpu_addr);
4690
                    if (rd == 1) {
4691
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4692
                        gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4693
                    } else {
4694
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4695
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4696
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
4697
                    }
4698
#else
4699
                    {
4700
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4701
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
4702
                    }
4703
#endif
4704
                    break;
4705
                case 0x22:      /* ldqf, load quad fpreg */
4706
                    {
4707
                        TCGv_i32 r_const;
4708

    
4709
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4710
                        r_const = tcg_const_i32(dc->mem_idx);
4711
                        gen_address_mask(dc, cpu_addr);
4712
                        gen_helper_ldqf(cpu_addr, r_const);
4713
                        tcg_temp_free_i32(r_const);
4714
                        gen_op_store_QT0_fpr(QFPREG(rd));
4715
                        gen_update_fprs_dirty(QFPREG(rd));
4716
                    }
4717
                    break;
4718
                case 0x23:      /* lddf, load double fpreg */
4719
                    gen_address_mask(dc, cpu_addr);
4720
                    cpu_dst_64 = gen_dest_fpr_D();
4721
                    tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4722
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4723
                    break;
4724
                default:
4725
                    goto illegal_insn;
4726
                }
4727
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4728
                       xop == 0xe || xop == 0x1e) {
4729
                gen_movl_reg_TN(rd, cpu_val);
4730
                switch (xop) {
4731
                case 0x4: /* st, store word */
4732
                    gen_address_mask(dc, cpu_addr);
4733
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4734
                    break;
4735
                case 0x5: /* stb, store byte */
4736
                    gen_address_mask(dc, cpu_addr);
4737
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4738
                    break;
4739
                case 0x6: /* sth, store halfword */
4740
                    gen_address_mask(dc, cpu_addr);
4741
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4742
                    break;
4743
                case 0x7: /* std, store double word */
4744
                    if (rd & 1)
4745
                        goto illegal_insn;
4746
                    else {
4747
                        TCGv_i32 r_const;
4748

    
4749
                        save_state(dc, cpu_cond);
4750
                        gen_address_mask(dc, cpu_addr);
4751
                        r_const = tcg_const_i32(7);
4752
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
4753
                        tcg_temp_free_i32(r_const);
4754
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
4755
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4756
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4757
                    }
4758
                    break;
4759
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4760
                case 0x14: /* sta, V9 stwa, store word alternate */
4761
#ifndef TARGET_SPARC64
4762
                    if (IS_IMM)
4763
                        goto illegal_insn;
4764
                    if (!supervisor(dc))
4765
                        goto priv_insn;
4766
#endif
4767
                    save_state(dc, cpu_cond);
4768
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
4769
                    dc->npc = DYNAMIC_PC;
4770
                    break;
4771
                case 0x15: /* stba, store byte alternate */
4772
#ifndef TARGET_SPARC64
4773
                    if (IS_IMM)
4774
                        goto illegal_insn;
4775
                    if (!supervisor(dc))
4776
                        goto priv_insn;
4777
#endif
4778
                    save_state(dc, cpu_cond);
4779
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
4780
                    dc->npc = DYNAMIC_PC;
4781
                    break;
4782
                case 0x16: /* stha, store halfword alternate */
4783
#ifndef TARGET_SPARC64
4784
                    if (IS_IMM)
4785
                        goto illegal_insn;
4786
                    if (!supervisor(dc))
4787
                        goto priv_insn;
4788
#endif
4789
                    save_state(dc, cpu_cond);
4790
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
4791
                    dc->npc = DYNAMIC_PC;
4792
                    break;
4793
                case 0x17: /* stda, store double word alternate */
4794
#ifndef TARGET_SPARC64
4795
                    if (IS_IMM)
4796
                        goto illegal_insn;
4797
                    if (!supervisor(dc))
4798
                        goto priv_insn;
4799
#endif
4800
                    if (rd & 1)
4801
                        goto illegal_insn;
4802
                    else {
4803
                        save_state(dc, cpu_cond);
4804
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4805
                    }
4806
                    break;
4807
#endif
4808
#ifdef TARGET_SPARC64
4809
                case 0x0e: /* V9 stx */
4810
                    gen_address_mask(dc, cpu_addr);
4811
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4812
                    break;
4813
                case 0x1e: /* V9 stxa */
4814
                    save_state(dc, cpu_cond);
4815
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
4816
                    dc->npc = DYNAMIC_PC;
4817
                    break;
4818
#endif
4819
                default:
4820
                    goto illegal_insn;
4821
                }
4822
            } else if (xop > 0x23 && xop < 0x28) {
4823
                if (gen_trap_ifnofpu(dc, cpu_cond))
4824
                    goto jmp_insn;
4825
                save_state(dc, cpu_cond);
4826
                switch (xop) {
4827
                case 0x24: /* stf, store fpreg */
4828
                    gen_address_mask(dc, cpu_addr);
4829
                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
4830
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4831
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4832
                    break;
4833
                case 0x25: /* stfsr, V9 stxfsr */
4834
#ifdef TARGET_SPARC64
4835
                    gen_address_mask(dc, cpu_addr);
4836
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4837
                    if (rd == 1)
4838
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4839
                    else
4840
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4841
#else
4842
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4843
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4844
#endif
4845
                    break;
4846
                case 0x26:
4847
#ifdef TARGET_SPARC64
4848
                    /* V9 stqf, store quad fpreg */
4849
                    {
4850
                        TCGv_i32 r_const;
4851

    
4852
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4853
                        gen_op_load_fpr_QT0(QFPREG(rd));
4854
                        r_const = tcg_const_i32(dc->mem_idx);
4855
                        gen_address_mask(dc, cpu_addr);
4856
                        gen_helper_stqf(cpu_addr, r_const);
4857
                        tcg_temp_free_i32(r_const);
4858
                    }
4859
                    break;
4860
#else /* !TARGET_SPARC64 */
4861
                    /* stdfq, store floating point queue */
4862
#if defined(CONFIG_USER_ONLY)
4863
                    goto illegal_insn;
4864
#else
4865
                    if (!supervisor(dc))
4866
                        goto priv_insn;
4867
                    if (gen_trap_ifnofpu(dc, cpu_cond))
4868
                        goto jmp_insn;
4869
                    goto nfq_insn;
4870
#endif
4871
#endif
4872
                case 0x27: /* stdf, store double fpreg */
4873
                    gen_address_mask(dc, cpu_addr);
4874
                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
4875
                    tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
4876
                    break;
4877
                default:
4878
                    goto illegal_insn;
4879
                }
4880
            } else if (xop > 0x33 && xop < 0x3f) {
4881
                save_state(dc, cpu_cond);
4882
                switch (xop) {
4883
#ifdef TARGET_SPARC64
4884
                case 0x34: /* V9 stfa */
4885
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4886
                        goto jmp_insn;
4887
                    }
4888
                    gen_stf_asi(cpu_addr, insn, 4, rd);
4889
                    break;
4890
                case 0x36: /* V9 stqfa */
4891
                    {
4892
                        TCGv_i32 r_const;
4893

    
4894
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4895
                        if (gen_trap_ifnofpu(dc, cpu_cond)) {
4896
                            goto jmp_insn;
4897
                        }
4898
                        r_const = tcg_const_i32(7);
4899
                        gen_helper_check_align(cpu_addr, r_const);
4900
                        tcg_temp_free_i32(r_const);
4901
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4902
                    }
4903
                    break;
4904
                case 0x37: /* V9 stdfa */
4905
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4906
                        goto jmp_insn;
4907
                    }
4908
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4909
                    break;
4910
                case 0x3c: /* V9 casa */
4911
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4912
                    gen_movl_TN_reg(rd, cpu_val);
4913
                    break;
4914
                case 0x3e: /* V9 casxa */
4915
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4916
                    gen_movl_TN_reg(rd, cpu_val);
4917
                    break;
4918
#else
4919
                case 0x34: /* stc */
4920
                case 0x35: /* stcsr */
4921
                case 0x36: /* stdcq */
4922
                case 0x37: /* stdc */
4923
                    goto ncp_insn;
4924
#endif
4925
                default:
4926
                    goto illegal_insn;
4927
                }
4928
            } else
4929
                goto illegal_insn;
4930
        }
4931
        break;
4932
    }
4933
    /* default case for non jump instructions */
4934
    if (dc->npc == DYNAMIC_PC) {
4935
        dc->pc = DYNAMIC_PC;
4936
        gen_op_next_insn();
4937
    } else if (dc->npc == JUMP_PC) {
4938
        /* we can do a static jump */
4939
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4940
        dc->is_br = 1;
4941
    } else {
4942
        dc->pc = dc->npc;
4943
        dc->npc = dc->npc + 4;
4944
    }
4945
 jmp_insn:
4946
    goto egress;
4947
 illegal_insn:
4948
    {
4949
        TCGv_i32 r_const;
4950

    
4951
        save_state(dc, cpu_cond);
4952
        r_const = tcg_const_i32(TT_ILL_INSN);
4953
        gen_helper_raise_exception(cpu_env, r_const);
4954
        tcg_temp_free_i32(r_const);
4955
        dc->is_br = 1;
4956
    }
4957
    goto egress;
4958
 unimp_flush:
4959
    {
4960
        TCGv_i32 r_const;
4961

    
4962
        save_state(dc, cpu_cond);
4963
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4964
        gen_helper_raise_exception(cpu_env, r_const);
4965
        tcg_temp_free_i32(r_const);
4966
        dc->is_br = 1;
4967
    }
4968
    goto egress;
4969
#if !defined(CONFIG_USER_ONLY)
4970
 priv_insn:
4971
    {
4972
        TCGv_i32 r_const;
4973

    
4974
        save_state(dc, cpu_cond);
4975
        r_const = tcg_const_i32(TT_PRIV_INSN);
4976
        gen_helper_raise_exception(cpu_env, r_const);
4977
        tcg_temp_free_i32(r_const);
4978
        dc->is_br = 1;
4979
    }
4980
    goto egress;
4981
#endif
4982
 nfpu_insn:
4983
    save_state(dc, cpu_cond);
4984
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4985
    dc->is_br = 1;
4986
    goto egress;
4987
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4988
 nfq_insn:
4989
    save_state(dc, cpu_cond);
4990
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4991
    dc->is_br = 1;
4992
    goto egress;
4993
#endif
4994
#ifndef TARGET_SPARC64
4995
 ncp_insn:
4996
    {
4997
        TCGv r_const;
4998

    
4999
        save_state(dc, cpu_cond);
5000
        r_const = tcg_const_i32(TT_NCP_INSN);
5001
        gen_helper_raise_exception(cpu_env, r_const);
5002
        tcg_temp_free(r_const);
5003
        dc->is_br = 1;
5004
    }
5005
    goto egress;
5006
#endif
5007
 egress:
5008
    tcg_temp_free(cpu_tmp1);
5009
    tcg_temp_free(cpu_tmp2);
5010
    if (dc->n_t32 != 0) {
5011
        int i;
5012
        for (i = dc->n_t32 - 1; i >= 0; --i) {
5013
            tcg_temp_free_i32(dc->t32[i]);
5014
        }
5015
        dc->n_t32 = 0;
5016
    }
5017
}
5018

    
5019
/* Translate one guest SPARC translation block into TCG ops.
 *
 * tb:  the TranslationBlock to fill in (tb->pc is the guest start PC,
 *      tb->cs_base carries the guest NPC on entry).
 * spc: nonzero when called from gen_intermediate_code_pc(), i.e. we are
 *      re-translating only to rebuild the PC/NPC-per-op tables
 *      (gen_opc_pc/gen_opc_npc/gen_opc_icount) used to recover guest
 *      state after a fault in generated code.
 * env: CPU state, used for mmu index, breakpoints and feature flags.
 *
 * Translation stops at a branch, at a page boundary, in single-step
 * mode, or when the op buffer / instruction-count budget is exhausted.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    /* Start from a zeroed context, then seed PC/NPC from the TB. */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by disas_sparc_insn(); freed below. */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    /* Local (spill-safe) temps: destination value ... */
    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Emit a debug trap instead of the insn when a breakpoint hits. */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record per-op guest PC/NPC so a host fault can be mapped
               back to the exact guest instruction. */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the shared temporaries in reverse allocation order. */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the instr-start table out to the end of the op buffer and
           publish the delayed-branch targets for state reconstruction. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5156

    
5157
/* Normal translation entry point: generate TCG ops for 'tb' without
   recording the per-op PC tables. */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, /* spc */ 0, env);
}
5161

    
5162
/* Re-translation entry point used for guest-state recovery: same as
   gen_intermediate_code() but records the per-op PC/NPC tables. */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, /* spc */ 1, env);
}
5166

    
5167
/* Register the TCG globals that mirror the SPARC architectural state in
 * CPUState (general registers, condition codes, PC/NPC, FPU registers,
 * and the SPARC64-only hypervisor/tick state).  Must run before any
 * translation; the 'inited' flag makes repeated calls no-ops.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    /* One name per double-precision pair; only the first TARGET_DPREGS
       entries are used below. */
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC64-only state: extended condition codes, ASI, FPRS, and
           the tick-compare / hypervisor registers. */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        /* SPARC32 window invalid mask. */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        /* Condition-code evaluation state shared by both variants. */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* %g0 is hardwired to zero, so only g1..g7 get backing globals. */
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5272

    
5273
/* Restore guest PC/NPC (and flush lazy condition codes) from the per-op
 * tables recorded during a search-PC re-translation.
 *
 * env:    CPU state to update.
 * tb:     translation block the fault occurred in (unused here; the
 *         gen_opc_* tables were filled for this TB).
 * pc_pos: index of the faulting op in the gen_opc_* tables.
 *
 * Fix: compare the recorded NPC against the file's DYNAMIC_PC/JUMP_PC
 * sentinels instead of the magic literals 1 and 2, keeping this in sync
 * with the rest of the translator should those values ever change.
 */
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr(env);
    }
}