1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "disas.h"
29
#include "helper.h"
30
#include "tcg-op.h"
31

    
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
#define DEBUG_DISAS
36

    
37
#define DYNAMIC_PC  1 /* dynamic pc value */
38
#define JUMP_PC     2 /* dynamic pc value which takes only two values
39
                         according to jump_pc[T2] */
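/* These sentinel values are stored in DisasContext.pc/npc in place of a real
   address; since SPARC instruction addresses are 4-byte aligned, 1 and 2 can
   never collide with a genuine PC.  jump_pc[] (see DisasContext below) holds
   the two possible targets of a conditional branch whose outcome is only
   known at run time. */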
40

    
41
/* global register indexes */
42
static TCGv_ptr cpu_env, cpu_regwptr;
43
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44
static TCGv_i32 cpu_cc_op;
45
static TCGv_i32 cpu_psr;
46
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47
static TCGv cpu_y;
48
#ifndef CONFIG_USER_ONLY
49
static TCGv cpu_tbr;
50
#endif
51
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52
#ifdef TARGET_SPARC64
53
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54
static TCGv cpu_gsr;
55
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57
static TCGv_i32 cpu_softint;
58
#else
59
static TCGv cpu_wim;
60
#endif
61
/* local register indexes (only used inside old micro ops) */
62
static TCGv cpu_tmp0;
63
static TCGv_i32 cpu_tmp32;
64
static TCGv_i64 cpu_tmp64;
65
/* Floating point registers */
66
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67

    
68
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69
static target_ulong gen_opc_jump_pc[2];
70

    
71
#include "gen-icount.h"
72

    
73
typedef struct DisasContext {
74
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77
    int is_br;
78
    int mem_idx;
79
    int fpu_enabled;
80
    int address_mask_32bit;
81
    int singlestep;
82
    uint32_t cc_op;  /* current CC operation */
83
    struct TranslationBlock *tb;
84
    sparc_def_t *def;
85
    TCGv_i32 t32[3];
86
    int n_t32;
87
} DisasContext;
88

    
89
// This macro uses non-native bit order (bit 0 is the most significant bit)
90
#define GET_FIELD(X, FROM, TO)                                  \
91
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92

    
93
// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0
94
#define GET_FIELD_SP(X, FROM, TO)               \
95
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
96

    
97
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
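/* Worked example: GET_FIELD counts bit 0 as the MSB, so GET_FIELD(insn, 3, 6)
   expands to (insn >> 25) & 0xf (the 4-bit Bicc condition field in bits
   28..25), while GET_FIELD_SP(insn, 25, 27) expands to (insn >> 25) & 0x7.
   The *s variants additionally sign-extend the extracted field. */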
99

    
100
#ifdef TARGET_SPARC64
101
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103
#else
104
#define DFPREG(r) (r & 0x1e)
105
#define QFPREG(r) (r & 0x1c)
106
#endif
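/* Example of the V9 extended FP register encoding assumed above: the low bit
   of the 5-bit register field supplies bit 5 of the register number, so an rd
   field of 1 names %d32 (DFPREG(1) == 32) and a field of 3 names %d34, while
   even field values map to %d0..%d30 unchanged.  QFPREG does the same for
   quad registers, which are constrained to multiples of 4. */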
107

    
108
#define UA2005_HTRAP_MASK 0xff
109
#define V8_TRAP_MASK 0x7f
110

    
111
static int sign_extend(int x, int len)
112
{
113
    len = 32 - len;
114
    return (x << len) >> len;
115
}
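/* Illustrative values: sign_extend(0x1ff, 10) == 0x1ff, but
   sign_extend(0x3ff, 10) == -1, because bit 9 is the sign bit of a 10-bit
   field.  The shift pair relies on the compiler performing an arithmetic
   right shift on signed int, as GCC does. */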
116

    
117
#define IS_IMM (insn & (1<<13))
118

    
119
static inline void gen_update_fprs_dirty(int rd)
120
{
121
#if defined(TARGET_SPARC64)
122
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
123
#endif
124
}
125

    
126
/* floating point registers moves */
127
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
128
{
129
#if TCG_TARGET_REG_BITS == 32
130
    if (src & 1) {
131
        return TCGV_LOW(cpu_fpr[src / 2]);
132
    } else {
133
        return TCGV_HIGH(cpu_fpr[src / 2]);
134
    }
135
#else
136
    if (src & 1) {
137
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
138
    } else {
139
        TCGv_i32 ret = tcg_temp_local_new_i32();
140
        TCGv_i64 t = tcg_temp_new_i64();
141

    
142
        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
143
        tcg_gen_trunc_i64_i32(ret, t);
144
        tcg_temp_free_i64(t);
145

    
146
        dc->t32[dc->n_t32++] = ret;
147
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
148

    
149
        return ret;
150
    }
151
#endif
152
}
153

    
154
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
155
{
156
#if TCG_TARGET_REG_BITS == 32
157
    if (dst & 1) {
158
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
159
    } else {
160
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
161
    }
162
#else
163
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
164
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
165
                        (dst & 1 ? 0 : 32), 32);
166
#endif
167
    gen_update_fprs_dirty(dst);
168
}
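/* Packing used by the two helpers above: each element of cpu_fpr[] holds two
   single-precision registers, the even-numbered %f register in the high 32
   bits and the odd-numbered one in the low 32 bits; that is why the deposit
   writes at bit offset 32 for an even dst and at offset 0 for an odd one. */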
169

    
170
static TCGv_i32 gen_dest_fpr_F(void)
171
{
172
    return cpu_tmp32;
173
}
174

    
175
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
176
{
177
    src = DFPREG(src);
178
    return cpu_fpr[src / 2];
179
}
180

    
181
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
182
{
183
    dst = DFPREG(dst);
184
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
185
    gen_update_fprs_dirty(dst);
186
}
187

    
188
static TCGv_i64 gen_dest_fpr_D(void)
189
{
190
    return cpu_tmp64;
191
}
192

    
193
static void gen_op_load_fpr_QT0(unsigned int src)
194
{
195
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
196
                   offsetof(CPU_QuadU, ll.upper));
197
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
198
                   offsetof(CPU_QuadU, ll.lower));
199
}
200

    
201
static void gen_op_load_fpr_QT1(unsigned int src)
202
{
203
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
204
                   offsetof(CPU_QuadU, ll.upper));
205
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
206
                   offsetof(CPU_QuadU, ll.lower));
207
}
208

    
209
static void gen_op_store_QT0_fpr(unsigned int dst)
210
{
211
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
212
                   offsetof(CPU_QuadU, ll.upper));
213
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
214
                   offsetof(CPU_QuadU, ll.lower));
215
}
216

    
217
#ifdef TARGET_SPARC64
218
static void gen_move_Q(unsigned int rd, unsigned int rs)
219
{
220
    rd = QFPREG(rd);
221
    rs = QFPREG(rs);
222

    
223
    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
224
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
225
    gen_update_fprs_dirty(rd);
226
}
227
#endif
228

    
229
/* moves */
230
#ifdef CONFIG_USER_ONLY
231
#define supervisor(dc) 0
232
#ifdef TARGET_SPARC64
233
#define hypervisor(dc) 0
234
#endif
235
#else
236
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237
#ifdef TARGET_SPARC64
238
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
239
#else
240
#endif
241
#endif
242

    
243
#ifdef TARGET_SPARC64
244
#ifndef TARGET_ABI32
245
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
246
#else
247
#define AM_CHECK(dc) (1)
248
#endif
249
#endif
250

    
251
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
252
{
253
#ifdef TARGET_SPARC64
254
    if (AM_CHECK(dc))
255
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
256
#endif
257
}
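/* Example: with the 32-bit address mask in effect (PSTATE.AM, or always for
   a 32-bit ABI), an effective address of 0xfffffffff0001000 is truncated to
   0x00000000f0001000 before the access is performed. */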
258

    
259
static inline void gen_movl_reg_TN(int reg, TCGv tn)
260
{
261
    if (reg == 0)
262
        tcg_gen_movi_tl(tn, 0);
263
    else if (reg < 8)
264
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
265
    else {
266
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
267
    }
268
}
269

    
270
static inline void gen_movl_TN_reg(int reg, TCGv tn)
271
{
272
    if (reg == 0)
273
        return;
274
    else if (reg < 8)
275
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
276
    else {
277
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
278
    }
279
}
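/* Register file access: %g0..%g7 live in the TCG globals cpu_gregs[], while
   the windowed registers (%o, %l and %i, i.e. regs 8..31) are loaded and
   stored through cpu_regwptr, which points at the current window inside the
   CPU state.  %g0 reads as zero and writes to it are silently discarded, as
   the two helpers above implement. */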
280

    
281
static inline void gen_goto_tb(DisasContext *s, int tb_num,
282
                               target_ulong pc, target_ulong npc)
283
{
284
    TranslationBlock *tb;
285

    
286
    tb = s->tb;
287
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
288
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
289
        !s->singlestep)  {
290
        /* jump to same page: we can use a direct jump */
291
        tcg_gen_goto_tb(tb_num);
292
        tcg_gen_movi_tl(cpu_pc, pc);
293
        tcg_gen_movi_tl(cpu_npc, npc);
294
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
295
    } else {
296
        /* jump to another page: currently not optimized */
297
        tcg_gen_movi_tl(cpu_pc, pc);
298
        tcg_gen_movi_tl(cpu_npc, npc);
299
        tcg_gen_exit_tb(0);
300
    }
301
}
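/* A direct (chainable) jump is emitted only when both pc and npc stay on the
   same guest page as this TB and we are not single-stepping.  The value
   passed to tcg_gen_exit_tb() is the TB pointer with the jump slot number
   (0 or 1) in its low bits, which tells the execution loop which goto_tb slot
   to patch -- the usual QEMU TB-chaining scheme. */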
302

    
303
// XXX suboptimal
304
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
305
{
306
    tcg_gen_extu_i32_tl(reg, src);
307
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
308
    tcg_gen_andi_tl(reg, reg, 0x1);
309
}
310

    
311
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
312
{
313
    tcg_gen_extu_i32_tl(reg, src);
314
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
315
    tcg_gen_andi_tl(reg, reg, 0x1);
316
}
317

    
318
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
319
{
320
    tcg_gen_extu_i32_tl(reg, src);
321
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
322
    tcg_gen_andi_tl(reg, reg, 0x1);
323
}
324

    
325
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
326
{
327
    tcg_gen_extu_i32_tl(reg, src);
328
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
329
    tcg_gen_andi_tl(reg, reg, 0x1);
330
}
331

    
332
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
333
{
334
    TCGv r_temp;
335
    TCGv_i32 r_const;
336
    int l1;
337

    
338
    l1 = gen_new_label();
339

    
340
    r_temp = tcg_temp_new();
341
    tcg_gen_xor_tl(r_temp, src1, src2);
342
    tcg_gen_not_tl(r_temp, r_temp);
343
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
344
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
345
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
346
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
347
    r_const = tcg_const_i32(TT_TOVF);
348
    gen_helper_raise_exception(cpu_env, r_const);
349
    tcg_temp_free_i32(r_const);
350
    gen_set_label(l1);
351
    tcg_temp_free(r_temp);
352
}
353

    
354
static inline void gen_tag_tv(TCGv src1, TCGv src2)
355
{
356
    int l1;
357
    TCGv_i32 r_const;
358

    
359
    l1 = gen_new_label();
360
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
361
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
362
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
363
    r_const = tcg_const_i32(TT_TOVF);
364
    gen_helper_raise_exception(cpu_env, r_const);
365
    tcg_temp_free_i32(r_const);
366
    gen_set_label(l1);
367
}
368

    
369
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
370
{
371
    tcg_gen_mov_tl(cpu_cc_src, src1);
372
    tcg_gen_movi_tl(cpu_cc_src2, src2);
373
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
374
    tcg_gen_mov_tl(dst, cpu_cc_dst);
375
}
376

    
377
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
378
{
379
    tcg_gen_mov_tl(cpu_cc_src, src1);
380
    tcg_gen_mov_tl(cpu_cc_src2, src2);
381
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
382
    tcg_gen_mov_tl(dst, cpu_cc_dst);
383
}
384

    
385
static TCGv_i32 gen_add32_carry32(void)
386
{
387
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
388

    
389
    /* Carry is computed from a previous add: (dst < src)  */
390
#if TARGET_LONG_BITS == 64
391
    cc_src1_32 = tcg_temp_new_i32();
392
    cc_src2_32 = tcg_temp_new_i32();
393
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
394
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
395
#else
396
    cc_src1_32 = cpu_cc_dst;
397
    cc_src2_32 = cpu_cc_src;
398
#endif
399

    
400
    carry_32 = tcg_temp_new_i32();
401
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
402

    
403
#if TARGET_LONG_BITS == 64
404
    tcg_temp_free_i32(cc_src1_32);
405
    tcg_temp_free_i32(cc_src2_32);
406
#endif
407

    
408
    return carry_32;
409
}
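/* Carry recovery after a 32-bit add: since dst = src1 + src2 wrapped to 32
   bits, the carry out of bit 31 is exactly (uint32_t)dst < (uint32_t)src1.
   Example: 0xffffffff + 2 wraps to 1, and 1 < 0xffffffff, so carry = 1. */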
410

    
411
static TCGv_i32 gen_sub32_carry32(void)
412
{
413
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
414

    
415
    /* Carry is computed from a previous borrow: (src1 < src2)  */
416
#if TARGET_LONG_BITS == 64
417
    cc_src1_32 = tcg_temp_new_i32();
418
    cc_src2_32 = tcg_temp_new_i32();
419
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
420
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
421
#else
422
    cc_src1_32 = cpu_cc_src;
423
    cc_src2_32 = cpu_cc_src2;
424
#endif
425

    
426
    carry_32 = tcg_temp_new_i32();
427
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
428

    
429
#if TARGET_LONG_BITS == 64
430
    tcg_temp_free_i32(cc_src1_32);
431
    tcg_temp_free_i32(cc_src2_32);
432
#endif
433

    
434
    return carry_32;
435
}
436

    
437
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
438
                            TCGv src2, int update_cc)
439
{
440
    TCGv_i32 carry_32;
441
    TCGv carry;
442

    
443
    switch (dc->cc_op) {
444
    case CC_OP_DIV:
445
    case CC_OP_LOGIC:
446
        /* Carry is known to be zero.  Fall back to plain ADD.  */
447
        if (update_cc) {
448
            gen_op_add_cc(dst, src1, src2);
449
        } else {
450
            tcg_gen_add_tl(dst, src1, src2);
451
        }
452
        return;
453

    
454
    case CC_OP_ADD:
455
    case CC_OP_TADD:
456
    case CC_OP_TADDTV:
457
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
458
        {
459
            /* For 32-bit hosts, we can re-use the host's hardware carry
460
               generation by using an ADD2 opcode.  We discard the low
461
               part of the output.  Ideally we'd combine this operation
462
               with the add that generated the carry in the first place.  */
463
            TCGv dst_low = tcg_temp_new();
464
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
465
                            cpu_cc_src, src1, cpu_cc_src2, src2);
466
            tcg_temp_free(dst_low);
467
            goto add_done;
468
        }
469
#endif
470
        carry_32 = gen_add32_carry32();
471
        break;
472

    
473
    case CC_OP_SUB:
474
    case CC_OP_TSUB:
475
    case CC_OP_TSUBTV:
476
        carry_32 = gen_sub32_carry32();
477
        break;
478

    
479
    default:
480
        /* We need external help to produce the carry.  */
481
        carry_32 = tcg_temp_new_i32();
482
        gen_helper_compute_C_icc(carry_32, cpu_env);
483
        break;
484
    }
485

    
486
#if TARGET_LONG_BITS == 64
487
    carry = tcg_temp_new();
488
    tcg_gen_extu_i32_i64(carry, carry_32);
489
#else
490
    carry = carry_32;
491
#endif
492

    
493
    tcg_gen_add_tl(dst, src1, src2);
494
    tcg_gen_add_tl(dst, dst, carry);
495

    
496
    tcg_temp_free_i32(carry_32);
497
#if TARGET_LONG_BITS == 64
498
    tcg_temp_free(carry);
499
#endif
500

    
501
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
502
 add_done:
503
#endif
504
    if (update_cc) {
505
        tcg_gen_mov_tl(cpu_cc_src, src1);
506
        tcg_gen_mov_tl(cpu_cc_src2, src2);
507
        tcg_gen_mov_tl(cpu_cc_dst, dst);
508
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
509
        dc->cc_op = CC_OP_ADDX;
510
    }
511
}
512

    
513
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
514
{
515
    tcg_gen_mov_tl(cpu_cc_src, src1);
516
    tcg_gen_mov_tl(cpu_cc_src2, src2);
517
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518
    tcg_gen_mov_tl(dst, cpu_cc_dst);
519
}
520

    
521
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
522
{
523
    tcg_gen_mov_tl(cpu_cc_src, src1);
524
    tcg_gen_mov_tl(cpu_cc_src2, src2);
525
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
526
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
527
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528
    tcg_gen_mov_tl(dst, cpu_cc_dst);
529
}
530

    
531
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
532
{
533
    TCGv r_temp;
534
    TCGv_i32 r_const;
535
    int l1;
536

    
537
    l1 = gen_new_label();
538

    
539
    r_temp = tcg_temp_new();
540
    tcg_gen_xor_tl(r_temp, src1, src2);
541
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
542
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
543
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
544
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
545
    r_const = tcg_const_i32(TT_TOVF);
546
    gen_helper_raise_exception(cpu_env, r_const);
547
    tcg_temp_free_i32(r_const);
548
    gen_set_label(l1);
549
    tcg_temp_free(r_temp);
550
}
551

    
552
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
553
{
554
    tcg_gen_mov_tl(cpu_cc_src, src1);
555
    tcg_gen_movi_tl(cpu_cc_src2, src2);
556
    if (src2 == 0) {
557
        tcg_gen_mov_tl(cpu_cc_dst, src1);
558
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
559
        dc->cc_op = CC_OP_LOGIC;
560
    } else {
561
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
562
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
563
        dc->cc_op = CC_OP_SUB;
564
    }
565
    tcg_gen_mov_tl(dst, cpu_cc_dst);
566
}
567

    
568
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
569
{
570
    tcg_gen_mov_tl(cpu_cc_src, src1);
571
    tcg_gen_mov_tl(cpu_cc_src2, src2);
572
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
573
    tcg_gen_mov_tl(dst, cpu_cc_dst);
574
}
575

    
576
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
577
                            TCGv src2, int update_cc)
578
{
579
    TCGv_i32 carry_32;
580
    TCGv carry;
581

    
582
    switch (dc->cc_op) {
583
    case CC_OP_DIV:
584
    case CC_OP_LOGIC:
585
        /* Carry is known to be zero.  Fall back to plain SUB.  */
586
        if (update_cc) {
587
            gen_op_sub_cc(dst, src1, src2);
588
        } else {
589
            tcg_gen_sub_tl(dst, src1, src2);
590
        }
591
        return;
592

    
593
    case CC_OP_ADD:
594
    case CC_OP_TADD:
595
    case CC_OP_TADDTV:
596
        carry_32 = gen_add32_carry32();
597
        break;
598

    
599
    case CC_OP_SUB:
600
    case CC_OP_TSUB:
601
    case CC_OP_TSUBTV:
602
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
603
        {
604
            /* For 32-bit hosts, we can re-use the host's hardware carry
605
               generation by using a SUB2 opcode.  We discard the low
606
               part of the output.  Ideally we'd combine this operation
607
               with the add that generated the carry in the first place.  */
608
            TCGv dst_low = tcg_temp_new();
609
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
610
                            cpu_cc_src, src1, cpu_cc_src2, src2);
611
            tcg_temp_free(dst_low);
612
            goto sub_done;
613
        }
614
#endif
615
        carry_32 = gen_sub32_carry32();
616
        break;
617

    
618
    default:
619
        /* We need external help to produce the carry.  */
620
        carry_32 = tcg_temp_new_i32();
621
        gen_helper_compute_C_icc(carry_32, cpu_env);
622
        break;
623
    }
624

    
625
#if TARGET_LONG_BITS == 64
626
    carry = tcg_temp_new();
627
    tcg_gen_extu_i32_i64(carry, carry_32);
628
#else
629
    carry = carry_32;
630
#endif
631

    
632
    tcg_gen_sub_tl(dst, src1, src2);
633
    tcg_gen_sub_tl(dst, dst, carry);
634

    
635
    tcg_temp_free_i32(carry_32);
636
#if TARGET_LONG_BITS == 64
637
    tcg_temp_free(carry);
638
#endif
639

    
640
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
641
 sub_done:
642
#endif
643
    if (update_cc) {
644
        tcg_gen_mov_tl(cpu_cc_src, src1);
645
        tcg_gen_mov_tl(cpu_cc_src2, src2);
646
        tcg_gen_mov_tl(cpu_cc_dst, dst);
647
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
648
        dc->cc_op = CC_OP_SUBX;
649
    }
650
}
651

    
652
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
653
{
654
    tcg_gen_mov_tl(cpu_cc_src, src1);
655
    tcg_gen_mov_tl(cpu_cc_src2, src2);
656
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657
    tcg_gen_mov_tl(dst, cpu_cc_dst);
658
}
659

    
660
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
661
{
662
    tcg_gen_mov_tl(cpu_cc_src, src1);
663
    tcg_gen_mov_tl(cpu_cc_src2, src2);
664
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
665
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667
    tcg_gen_mov_tl(dst, cpu_cc_dst);
668
}
669

    
670
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
671
{
672
    TCGv r_temp;
673
    int l1;
674

    
675
    l1 = gen_new_label();
676
    r_temp = tcg_temp_new();
677

    
678
    /* old op:
679
    if (!(env->y & 1))
680
        T1 = 0;
681
    */
682
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
683
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
684
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
685
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
686
    tcg_gen_movi_tl(cpu_cc_src2, 0);
687
    gen_set_label(l1);
688

    
689
    // b2 = T0 & 1;
690
    // env->y = (b2 << 31) | (env->y >> 1);
691
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
692
    tcg_gen_shli_tl(r_temp, r_temp, 31);
693
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
694
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
695
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
696
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
697

    
698
    // b1 = N ^ V;
699
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
700
    gen_mov_reg_V(r_temp, cpu_psr);
701
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
702
    tcg_temp_free(r_temp);
703

    
704
    // T0 = (b1 << 31) | (T0 >> 1);
705
    // src1 = T0;
706
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
707
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
708
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
709

    
710
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
711

    
712
    tcg_gen_mov_tl(dst, cpu_cc_dst);
713
}
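/* MULScc performs one step of the V8 32-step shift-and-add multiply: rs2 is
   used as the addend only when the LSB of %y is set, %y is shifted right with
   the LSB of rs1 inserted at bit 31, rs1 is shifted right with (N xor V) from
   the previous step inserted at bit 31, and the sum of the two becomes the
   result; the caller updates the condition codes from cpu_cc_*. */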
714

    
715
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
716
{
717
    TCGv_i32 r_src1, r_src2;
718
    TCGv_i64 r_temp, r_temp2;
719

    
720
    r_src1 = tcg_temp_new_i32();
721
    r_src2 = tcg_temp_new_i32();
722

    
723
    tcg_gen_trunc_tl_i32(r_src1, src1);
724
    tcg_gen_trunc_tl_i32(r_src2, src2);
725

    
726
    r_temp = tcg_temp_new_i64();
727
    r_temp2 = tcg_temp_new_i64();
728

    
729
    if (sign_ext) {
730
        tcg_gen_ext_i32_i64(r_temp, r_src2);
731
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
732
    } else {
733
        tcg_gen_extu_i32_i64(r_temp, r_src2);
734
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
735
    }
736

    
737
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
738

    
739
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
740
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
741
    tcg_temp_free_i64(r_temp);
742
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
743

    
744
    tcg_gen_trunc_i64_tl(dst, r_temp2);
745

    
746
    tcg_temp_free_i64(r_temp2);
747

    
748
    tcg_temp_free_i32(r_src1);
749
    tcg_temp_free_i32(r_src2);
750
}
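/* UMUL/SMUL semantics implemented above: the low 32 bits of each operand are
   multiplied to a 64-bit product, whose high 32 bits go to %y; the product
   (all 64 bits on a 64-bit target, the low 32 bits otherwise) is written to
   the destination register. */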
751

    
752
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
753
{
754
    /* zero-extend truncated operands before multiplication */
755
    gen_op_multiply(dst, src1, src2, 0);
756
}
757

    
758
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
759
{
760
    /* sign-extend truncated operands before multiplication */
761
    gen_op_multiply(dst, src1, src2, 1);
762
}
763

    
764
#ifdef TARGET_SPARC64
765
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
766
{
767
    TCGv_i32 r_const;
768
    int l1;
769

    
770
    l1 = gen_new_label();
771
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
772
    r_const = tcg_const_i32(TT_DIV_ZERO);
773
    gen_helper_raise_exception(cpu_env, r_const);
774
    tcg_temp_free_i32(r_const);
775
    gen_set_label(l1);
776
}
777

    
778
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
779
{
780
    int l1, l2;
781
    TCGv r_temp1, r_temp2;
782

    
783
    l1 = gen_new_label();
784
    l2 = gen_new_label();
785
    r_temp1 = tcg_temp_local_new();
786
    r_temp2 = tcg_temp_local_new();
787
    tcg_gen_mov_tl(r_temp1, src1);
788
    tcg_gen_mov_tl(r_temp2, src2);
789
    gen_trap_ifdivzero_tl(r_temp2);
790
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
791
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
792
    tcg_gen_movi_i64(dst, INT64_MIN);
793
    tcg_gen_br(l2);
794
    gen_set_label(l1);
795
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
796
    gen_set_label(l2);
797
    tcg_temp_free(r_temp1);
798
    tcg_temp_free(r_temp2);
799
}
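/* SDIVX corner cases handled above: a zero divisor raises TT_DIV_ZERO, and
   INT64_MIN / -1, which would overflow a signed 64-bit division, is forced to
   the result INT64_MIN instead of being executed. */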
800
#endif
801

    
802
// 1
803
static inline void gen_op_eval_ba(TCGv dst)
804
{
805
    tcg_gen_movi_tl(dst, 1);
806
}
807

    
808
// Z
809
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
810
{
811
    gen_mov_reg_Z(dst, src);
812
}
813

    
814
// Z | (N ^ V)
815
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
816
{
817
    gen_mov_reg_N(cpu_tmp0, src);
818
    gen_mov_reg_V(dst, src);
819
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
820
    gen_mov_reg_Z(cpu_tmp0, src);
821
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
822
}
823

    
824
// N ^ V
825
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
826
{
827
    gen_mov_reg_V(cpu_tmp0, src);
828
    gen_mov_reg_N(dst, src);
829
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
830
}
831

    
832
// C | Z
833
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
834
{
835
    gen_mov_reg_Z(cpu_tmp0, src);
836
    gen_mov_reg_C(dst, src);
837
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
838
}
839

    
840
// C
841
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
842
{
843
    gen_mov_reg_C(dst, src);
844
}
845

    
846
// V
847
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
848
{
849
    gen_mov_reg_V(dst, src);
850
}
851

    
852
// 0
853
static inline void gen_op_eval_bn(TCGv dst)
854
{
855
    tcg_gen_movi_tl(dst, 0);
856
}
857

    
858
// N
859
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
860
{
861
    gen_mov_reg_N(dst, src);
862
}
863

    
864
// !Z
865
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
866
{
867
    gen_mov_reg_Z(dst, src);
868
    tcg_gen_xori_tl(dst, dst, 0x1);
869
}
870

    
871
// !(Z | (N ^ V))
872
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
873
{
874
    gen_mov_reg_N(cpu_tmp0, src);
875
    gen_mov_reg_V(dst, src);
876
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
877
    gen_mov_reg_Z(cpu_tmp0, src);
878
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
879
    tcg_gen_xori_tl(dst, dst, 0x1);
880
}
881

    
882
// !(N ^ V)
883
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
884
{
885
    gen_mov_reg_V(cpu_tmp0, src);
886
    gen_mov_reg_N(dst, src);
887
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
888
    tcg_gen_xori_tl(dst, dst, 0x1);
889
}
890

    
891
// !(C | Z)
892
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
893
{
894
    gen_mov_reg_Z(cpu_tmp0, src);
895
    gen_mov_reg_C(dst, src);
896
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
897
    tcg_gen_xori_tl(dst, dst, 0x1);
898
}
899

    
900
// !C
901
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
902
{
903
    gen_mov_reg_C(dst, src);
904
    tcg_gen_xori_tl(dst, dst, 0x1);
905
}
906

    
907
// !N
908
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
909
{
910
    gen_mov_reg_N(dst, src);
911
    tcg_gen_xori_tl(dst, dst, 0x1);
912
}
913

    
914
// !V
915
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
916
{
917
    gen_mov_reg_V(dst, src);
918
    tcg_gen_xori_tl(dst, dst, 0x1);
919
}
920

    
921
/*
922
  FPSR bit field FCC1 | FCC0:
923
   0 =
924
   1 <
925
   2 >
926
   3 unordered
927
*/
928
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
929
                                    unsigned int fcc_offset)
930
{
931
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
932
    tcg_gen_andi_tl(reg, reg, 0x1);
933
}
934

    
935
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
936
                                    unsigned int fcc_offset)
937
{
938
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
939
    tcg_gen_andi_tl(reg, reg, 0x1);
940
}
941

    
942
// !0: FCC0 | FCC1
943
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
944
                                    unsigned int fcc_offset)
945
{
946
    gen_mov_reg_FCC0(dst, src, fcc_offset);
947
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
948
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
949
}
950

    
951
// 1 or 2: FCC0 ^ FCC1
952
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
953
                                    unsigned int fcc_offset)
954
{
955
    gen_mov_reg_FCC0(dst, src, fcc_offset);
956
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
957
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
958
}
959

    
960
// 1 or 3: FCC0
961
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
962
                                    unsigned int fcc_offset)
963
{
964
    gen_mov_reg_FCC0(dst, src, fcc_offset);
965
}
966

    
967
// 1: FCC0 & !FCC1
968
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
969
                                    unsigned int fcc_offset)
970
{
971
    gen_mov_reg_FCC0(dst, src, fcc_offset);
972
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
973
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
974
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
975
}
976

    
977
// 2 or 3: FCC1
978
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
979
                                    unsigned int fcc_offset)
980
{
981
    gen_mov_reg_FCC1(dst, src, fcc_offset);
982
}
983

    
984
// 2: !FCC0 & FCC1
985
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
986
                                    unsigned int fcc_offset)
987
{
988
    gen_mov_reg_FCC0(dst, src, fcc_offset);
989
    tcg_gen_xori_tl(dst, dst, 0x1);
990
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
991
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
992
}
993

    
994
// 3: FCC0 & FCC1
995
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
996
                                    unsigned int fcc_offset)
997
{
998
    gen_mov_reg_FCC0(dst, src, fcc_offset);
999
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1000
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1001
}
1002

    
1003
// 0: !(FCC0 | FCC1)
1004
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1005
                                    unsigned int fcc_offset)
1006
{
1007
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1008
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
1010
    tcg_gen_xori_tl(dst, dst, 0x1);
1011
}
1012

    
1013
// 0 or 3: !(FCC0 ^ FCC1)
1014
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1015
                                    unsigned int fcc_offset)
1016
{
1017
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1018
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1019
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1020
    tcg_gen_xori_tl(dst, dst, 0x1);
1021
}
1022

    
1023
// 0 or 2: !FCC0
1024
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1025
                                    unsigned int fcc_offset)
1026
{
1027
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1028
    tcg_gen_xori_tl(dst, dst, 0x1);
1029
}
1030

    
1031
// !1: !(FCC0 & !FCC1)
1032
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1033
                                    unsigned int fcc_offset)
1034
{
1035
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1036
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1037
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1038
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039
    tcg_gen_xori_tl(dst, dst, 0x1);
1040
}
1041

    
1042
// 0 or 1: !FCC1
1043
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1044
                                    unsigned int fcc_offset)
1045
{
1046
    gen_mov_reg_FCC1(dst, src, fcc_offset);
1047
    tcg_gen_xori_tl(dst, dst, 0x1);
1048
}
1049

    
1050
// !2: !(!FCC0 & FCC1)
1051
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1052
                                    unsigned int fcc_offset)
1053
{
1054
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1055
    tcg_gen_xori_tl(dst, dst, 0x1);
1056
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1057
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1058
    tcg_gen_xori_tl(dst, dst, 0x1);
1059
}
1060

    
1061
// !3: !(FCC0 & FCC1)
1062
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1063
                                    unsigned int fcc_offset)
1064
{
1065
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1066
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1068
    tcg_gen_xori_tl(dst, dst, 0x1);
1069
}
1070

    
1071
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1072
                               target_ulong pc2, TCGv r_cond)
1073
{
1074
    int l1;
1075

    
1076
    l1 = gen_new_label();
1077

    
1078
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1079

    
1080
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
1081

    
1082
    gen_set_label(l1);
1083
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
1084
}
1085

    
1086
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1087
                                target_ulong pc2, TCGv r_cond)
1088
{
1089
    int l1;
1090

    
1091
    l1 = gen_new_label();
1092

    
1093
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1094

    
1095
    gen_goto_tb(dc, 0, pc2, pc1);
1096

    
1097
    gen_set_label(l1);
1098
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1099
}
1100

    
1101
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1102
                                      TCGv r_cond)
1103
{
1104
    int l1, l2;
1105

    
1106
    l1 = gen_new_label();
1107
    l2 = gen_new_label();
1108

    
1109
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1110

    
1111
    tcg_gen_movi_tl(cpu_npc, npc1);
1112
    tcg_gen_br(l2);
1113

    
1114
    gen_set_label(l1);
1115
    tcg_gen_movi_tl(cpu_npc, npc2);
1116
    gen_set_label(l2);
1117
}
1118

    
1119
/* call this function before using the condition register as it may
1120
   have been set for a jump */
1121
static inline void flush_cond(DisasContext *dc, TCGv cond)
1122
{
1123
    if (dc->npc == JUMP_PC) {
1124
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1125
        dc->npc = DYNAMIC_PC;
1126
    }
1127
}
1128

    
1129
static inline void save_npc(DisasContext *dc, TCGv cond)
1130
{
1131
    if (dc->npc == JUMP_PC) {
1132
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1133
        dc->npc = DYNAMIC_PC;
1134
    } else if (dc->npc != DYNAMIC_PC) {
1135
        tcg_gen_movi_tl(cpu_npc, dc->npc);
1136
    }
1137
}
1138

    
1139
static inline void save_state(DisasContext *dc, TCGv cond)
1140
{
1141
    tcg_gen_movi_tl(cpu_pc, dc->pc);
1142
    /* flush pending conditional evaluations before exposing cpu state */
1143
    if (dc->cc_op != CC_OP_FLAGS) {
1144
        dc->cc_op = CC_OP_FLAGS;
1145
        gen_helper_compute_psr(cpu_env);
1146
    }
1147
    save_npc(dc, cond);
1148
}
1149

    
1150
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1151
{
1152
    if (dc->npc == JUMP_PC) {
1153
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1154
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1155
        dc->pc = DYNAMIC_PC;
1156
    } else if (dc->npc == DYNAMIC_PC) {
1157
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1158
        dc->pc = DYNAMIC_PC;
1159
    } else {
1160
        dc->pc = dc->npc;
1161
    }
1162
}
1163

    
1164
static inline void gen_op_next_insn(void)
1165
{
1166
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
1167
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1168
}
1169

    
1170
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1171
                            DisasContext *dc)
1172
{
1173
    TCGv_i32 r_src;
1174

    
1175
#ifdef TARGET_SPARC64
1176
    if (cc)
1177
        r_src = cpu_xcc;
1178
    else
1179
        r_src = cpu_psr;
1180
#else
1181
    r_src = cpu_psr;
1182
#endif
1183
    switch (dc->cc_op) {
1184
    case CC_OP_FLAGS:
1185
        break;
1186
    default:
1187
        gen_helper_compute_psr(cpu_env);
1188
        dc->cc_op = CC_OP_FLAGS;
1189
        break;
1190
    }
1191
    switch (cond) {
1192
    case 0x0:
1193
        gen_op_eval_bn(r_dst);
1194
        break;
1195
    case 0x1:
1196
        gen_op_eval_be(r_dst, r_src);
1197
        break;
1198
    case 0x2:
1199
        gen_op_eval_ble(r_dst, r_src);
1200
        break;
1201
    case 0x3:
1202
        gen_op_eval_bl(r_dst, r_src);
1203
        break;
1204
    case 0x4:
1205
        gen_op_eval_bleu(r_dst, r_src);
1206
        break;
1207
    case 0x5:
1208
        gen_op_eval_bcs(r_dst, r_src);
1209
        break;
1210
    case 0x6:
1211
        gen_op_eval_bneg(r_dst, r_src);
1212
        break;
1213
    case 0x7:
1214
        gen_op_eval_bvs(r_dst, r_src);
1215
        break;
1216
    case 0x8:
1217
        gen_op_eval_ba(r_dst);
1218
        break;
1219
    case 0x9:
1220
        gen_op_eval_bne(r_dst, r_src);
1221
        break;
1222
    case 0xa:
1223
        gen_op_eval_bg(r_dst, r_src);
1224
        break;
1225
    case 0xb:
1226
        gen_op_eval_bge(r_dst, r_src);
1227
        break;
1228
    case 0xc:
1229
        gen_op_eval_bgu(r_dst, r_src);
1230
        break;
1231
    case 0xd:
1232
        gen_op_eval_bcc(r_dst, r_src);
1233
        break;
1234
    case 0xe:
1235
        gen_op_eval_bpos(r_dst, r_src);
1236
        break;
1237
    case 0xf:
1238
        gen_op_eval_bvc(r_dst, r_src);
1239
        break;
1240
    }
1241
}
1242

    
1243
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1244
{
1245
    unsigned int offset;
1246

    
1247
    switch (cc) {
1248
    default:
1249
    case 0x0:
1250
        offset = 0;
1251
        break;
1252
    case 0x1:
1253
        offset = 32 - 10;
1254
        break;
1255
    case 0x2:
1256
        offset = 34 - 10;
1257
        break;
1258
    case 0x3:
1259
        offset = 36 - 10;
1260
        break;
1261
    }
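    /* The offsets select which fcc field of the FSR is tested, assuming the
       usual layout (fcc0 at bits 11:10, fcc1 at 33:32, fcc2 at 35:34, fcc3 at
       37:36, as defined in cpu.h): gen_mov_reg_FCC0/1 add the offset to
       FSR_FCC0_SHIFT/FSR_FCC1_SHIFT, hence 32 - 10, 34 - 10 and 36 - 10. */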
1262

    
1263
    switch (cond) {
1264
    case 0x0:
1265
        gen_op_eval_bn(r_dst);
1266
        break;
1267
    case 0x1:
1268
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1269
        break;
1270
    case 0x2:
1271
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1272
        break;
1273
    case 0x3:
1274
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1275
        break;
1276
    case 0x4:
1277
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1278
        break;
1279
    case 0x5:
1280
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1281
        break;
1282
    case 0x6:
1283
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1284
        break;
1285
    case 0x7:
1286
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1287
        break;
1288
    case 0x8:
1289
        gen_op_eval_ba(r_dst);
1290
        break;
1291
    case 0x9:
1292
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1293
        break;
1294
    case 0xa:
1295
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1296
        break;
1297
    case 0xb:
1298
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1299
        break;
1300
    case 0xc:
1301
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1302
        break;
1303
    case 0xd:
1304
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1305
        break;
1306
    case 0xe:
1307
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1308
        break;
1309
    case 0xf:
1310
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1311
        break;
1312
    }
1313
}
1314

    
1315
#ifdef TARGET_SPARC64
1316
// Inverted logic
1317
static const int gen_tcg_cond_reg[8] = {
1318
    -1,
1319
    TCG_COND_NE,
1320
    TCG_COND_GT,
1321
    TCG_COND_GE,
1322
    -1,
1323
    TCG_COND_EQ,
1324
    TCG_COND_LE,
1325
    TCG_COND_LT,
1326
};
1327

    
1328
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1329
{
1330
    int l1;
1331

    
1332
    l1 = gen_new_label();
1333
    tcg_gen_movi_tl(r_dst, 0);
1334
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1335
    tcg_gen_movi_tl(r_dst, 1);
1336
    gen_set_label(l1);
1337
}
1338
#endif
1339

    
1340
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1341
                      TCGv r_cond)
1342
{
1343
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1344
    target_ulong target = dc->pc + offset;
1345

    
1346
    if (cond == 0x0) {
1347
        /* unconditional not taken */
1348
        if (a) {
1349
            dc->pc = dc->npc + 4;
1350
            dc->npc = dc->pc + 4;
1351
        } else {
1352
            dc->pc = dc->npc;
1353
            dc->npc = dc->pc + 4;
1354
        }
1355
    } else if (cond == 0x8) {
1356
        /* unconditional taken */
1357
        if (a) {
1358
            dc->pc = target;
1359
            dc->npc = dc->pc + 4;
1360
        } else {
1361
            dc->pc = dc->npc;
1362
            dc->npc = target;
1363
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1364
        }
1365
    } else {
1366
        flush_cond(dc, r_cond);
1367
        gen_cond(r_cond, cc, cond, dc);
1368
        if (a) {
1369
            gen_branch_a(dc, target, dc->npc, r_cond);
1370
            dc->is_br = 1;
1371
        } else {
1372
            dc->pc = dc->npc;
1373
            dc->jump_pc[0] = target;
1374
            if (unlikely(dc->npc == DYNAMIC_PC)) {
1375
                dc->jump_pc[1] = DYNAMIC_PC;
1376
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1377
            } else {
1378
                dc->jump_pc[1] = dc->npc + 4;
1379
                dc->npc = JUMP_PC;
1380
            }
1381
        }
1382
    }
1383
}
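/* Delay-slot semantics implemented above: an annulled branch executes its
   delay slot only when the branch is taken (gen_branch_a), while a
   non-annulled conditional branch leaves npc in the JUMP_PC state so the
   delay-slot instruction is translated before the choice between jump_pc[0]
   (taken) and jump_pc[1] (fall-through) is resolved. */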
1384

    
1385
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1386
                      TCGv r_cond)
1387
{
1388
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1389
    target_ulong target = dc->pc + offset;
1390

    
1391
    if (cond == 0x0) {
1392
        /* unconditional not taken */
1393
        if (a) {
1394
            dc->pc = dc->npc + 4;
1395
            dc->npc = dc->pc + 4;
1396
        } else {
1397
            dc->pc = dc->npc;
1398
            dc->npc = dc->pc + 4;
1399
        }
1400
    } else if (cond == 0x8) {
1401
        /* unconditional taken */
1402
        if (a) {
1403
            dc->pc = target;
1404
            dc->npc = dc->pc + 4;
1405
        } else {
1406
            dc->pc = dc->npc;
1407
            dc->npc = target;
1408
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1409
        }
1410
    } else {
1411
        flush_cond(dc, r_cond);
1412
        gen_fcond(r_cond, cc, cond);
1413
        if (a) {
1414
            gen_branch_a(dc, target, dc->npc, r_cond);
1415
            dc->is_br = 1;
1416
        } else {
1417
            dc->pc = dc->npc;
1418
            dc->jump_pc[0] = target;
1419
            if (unlikely(dc->npc == DYNAMIC_PC)) {
1420
                dc->jump_pc[1] = DYNAMIC_PC;
1421
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1422
            } else {
1423
                dc->jump_pc[1] = dc->npc + 4;
1424
                dc->npc = JUMP_PC;
1425
            }
1426
        }
1427
    }
1428
}
1429

    
1430
#ifdef TARGET_SPARC64
1431
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1432
                          TCGv r_cond, TCGv r_reg)
1433
{
1434
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1435
    target_ulong target = dc->pc + offset;
1436

    
1437
    flush_cond(dc, r_cond);
1438
    gen_cond_reg(r_cond, cond, r_reg);
1439
    if (a) {
1440
        gen_branch_a(dc, target, dc->npc, r_cond);
1441
        dc->is_br = 1;
1442
    } else {
1443
        dc->pc = dc->npc;
1444
        dc->jump_pc[0] = target;
1445
        if (unlikely(dc->npc == DYNAMIC_PC)) {
1446
            dc->jump_pc[1] = DYNAMIC_PC;
1447
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1448
        } else {
1449
            dc->jump_pc[1] = dc->npc + 4;
1450
            dc->npc = JUMP_PC;
1451
        }
1452
    }
1453
}
1454

    
1455
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1456
{
1457
    switch (fccno) {
1458
    case 0:
1459
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1460
        break;
1461
    case 1:
1462
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1463
        break;
1464
    case 2:
1465
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1466
        break;
1467
    case 3:
1468
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1469
        break;
1470
    }
1471
}
1472

    
1473
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1474
{
1475
    switch (fccno) {
1476
    case 0:
1477
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1478
        break;
1479
    case 1:
1480
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1481
        break;
1482
    case 2:
1483
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1484
        break;
1485
    case 3:
1486
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1487
        break;
1488
    }
1489
}
1490

    
1491
static inline void gen_op_fcmpq(int fccno)
1492
{
1493
    switch (fccno) {
1494
    case 0:
1495
        gen_helper_fcmpq(cpu_env);
1496
        break;
1497
    case 1:
1498
        gen_helper_fcmpq_fcc1(cpu_env);
1499
        break;
1500
    case 2:
1501
        gen_helper_fcmpq_fcc2(cpu_env);
1502
        break;
1503
    case 3:
1504
        gen_helper_fcmpq_fcc3(cpu_env);
1505
        break;
1506
    }
1507
}
1508

    
1509
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1510
{
1511
    switch (fccno) {
1512
    case 0:
1513
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1514
        break;
1515
    case 1:
1516
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1517
        break;
1518
    case 2:
1519
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1520
        break;
1521
    case 3:
1522
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1523
        break;
1524
    }
1525
}
1526

    
1527
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1528
{
1529
    switch (fccno) {
1530
    case 0:
1531
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1532
        break;
1533
    case 1:
1534
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1535
        break;
1536
    case 2:
1537
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1538
        break;
1539
    case 3:
1540
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1541
        break;
1542
    }
1543
}
1544

    
1545
static inline void gen_op_fcmpeq(int fccno)
1546
{
1547
    switch (fccno) {
1548
    case 0:
1549
        gen_helper_fcmpeq(cpu_env);
1550
        break;
1551
    case 1:
1552
        gen_helper_fcmpeq_fcc1(cpu_env);
1553
        break;
1554
    case 2:
1555
        gen_helper_fcmpeq_fcc2(cpu_env);
1556
        break;
1557
    case 3:
1558
        gen_helper_fcmpeq_fcc3(cpu_env);
1559
        break;
1560
    }
1561
}
1562

    
1563
#else
1564

    
1565
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1566
{
1567
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1568
}
1569

    
1570
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1571
{
1572
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1573
}
1574

    
1575
static inline void gen_op_fcmpq(int fccno)
1576
{
1577
    gen_helper_fcmpq(cpu_env);
1578
}
1579

    
1580
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1581
{
1582
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1583
}
1584

    
1585
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1586
{
1587
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1588
}
1589

    
1590
static inline void gen_op_fcmpeq(int fccno)
1591
{
1592
    gen_helper_fcmpeq(cpu_env);
1593
}
1594
#endif
1595

    
1596
static inline void gen_op_fpexception_im(int fsr_flags)
1597
{
1598
    TCGv_i32 r_const;
1599

    
1600
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1601
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1602
    r_const = tcg_const_i32(TT_FP_EXCP);
1603
    gen_helper_raise_exception(cpu_env, r_const);
1604
    tcg_temp_free_i32(r_const);
1605
}
1606

    
1607
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1608
{
1609
#if !defined(CONFIG_USER_ONLY)
1610
    if (!dc->fpu_enabled) {
1611
        TCGv_i32 r_const;
1612

    
1613
        save_state(dc, r_cond);
1614
        r_const = tcg_const_i32(TT_NFPU_INSN);
1615
        gen_helper_raise_exception(cpu_env, r_const);
1616
        tcg_temp_free_i32(r_const);
1617
        dc->is_br = 1;
1618
        return 1;
1619
    }
1620
#endif
1621
    return 0;
1622
}
1623

    
1624
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1625
{
1626
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1627
}
1628

    
1629
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1630
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1631
{
1632
    TCGv_i32 dst, src;
1633

    
1634
    src = gen_load_fpr_F(dc, rs);
1635
    dst = gen_dest_fpr_F();
1636

    
1637
    gen(dst, cpu_env, src);
1638

    
1639
    gen_store_fpr_F(dc, rd, dst);
1640
}
1641

    
1642
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1643
                                 void (*gen)(TCGv_i32, TCGv_i32))
1644
{
1645
    TCGv_i32 dst, src;
1646

    
1647
    src = gen_load_fpr_F(dc, rs);
1648
    dst = gen_dest_fpr_F();
1649

    
1650
    gen(dst, src);
1651

    
1652
    gen_store_fpr_F(dc, rd, dst);
1653
}
1654

    
1655
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1656
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1657
{
1658
    TCGv_i32 dst, src1, src2;
1659

    
1660
    src1 = gen_load_fpr_F(dc, rs1);
1661
    src2 = gen_load_fpr_F(dc, rs2);
1662
    dst = gen_dest_fpr_F();
1663

    
1664
    gen(dst, cpu_env, src1, src2);
1665

    
1666
    gen_store_fpr_F(dc, rd, dst);
1667
}
1668

    
1669
#ifdef TARGET_SPARC64
1670
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1672
{
1673
    TCGv_i32 dst, src1, src2;
1674

    
1675
    src1 = gen_load_fpr_F(dc, rs1);
1676
    src2 = gen_load_fpr_F(dc, rs2);
1677
    dst = gen_dest_fpr_F();
1678

    
1679
    gen(dst, src1, src2);
1680

    
1681
    gen_store_fpr_F(dc, rd, dst);
1682
}
1683
#endif
1684

    
1685
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1686
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1687
{
1688
    TCGv_i64 dst, src;
1689

    
1690
    src = gen_load_fpr_D(dc, rs);
1691
    dst = gen_dest_fpr_D();
1692

    
1693
    gen(dst, cpu_env, src);
1694

    
1695
    gen_store_fpr_D(dc, rd, dst);
1696
}
1697

    
1698
#ifdef TARGET_SPARC64
1699
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1700
                                 void (*gen)(TCGv_i64, TCGv_i64))
1701
{
1702
    TCGv_i64 dst, src;
1703

    
1704
    src = gen_load_fpr_D(dc, rs);
1705
    dst = gen_dest_fpr_D();
1706

    
1707
    gen(dst, src);
1708

    
1709
    gen_store_fpr_D(dc, rd, dst);
1710
}
1711
#endif
1712

    
1713
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1714
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1715
{
1716
    TCGv_i64 dst, src1, src2;
1717

    
1718
    src1 = gen_load_fpr_D(dc, rs1);
1719
    src2 = gen_load_fpr_D(dc, rs2);
1720
    dst = gen_dest_fpr_D();
1721

    
1722
    gen(dst, cpu_env, src1, src2);
1723

    
1724
    gen_store_fpr_D(dc, rd, dst);
1725
}
1726

    
1727
#ifdef TARGET_SPARC64
1728
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1729
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1730
{
1731
    TCGv_i64 dst, src1, src2;
1732

    
1733
    src1 = gen_load_fpr_D(dc, rs1);
1734
    src2 = gen_load_fpr_D(dc, rs2);
1735
    dst = gen_dest_fpr_D();
1736

    
1737
    gen(dst, src1, src2);
1738

    
1739
    gen_store_fpr_D(dc, rd, dst);
1740
}
1741

    
1742
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1743
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1744
{
1745
    TCGv_i64 dst, src1, src2;
1746

    
1747
    src1 = gen_load_fpr_D(dc, rs1);
1748
    src2 = gen_load_fpr_D(dc, rs2);
1749
    dst = gen_dest_fpr_D();
1750

    
1751
    gen(dst, cpu_gsr, src1, src2);
1752

    
1753
    gen_store_fpr_D(dc, rd, dst);
1754
}
1755

    
1756
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1757
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1758
{
1759
    TCGv_i64 dst, src0, src1, src2;
1760

    
1761
    src1 = gen_load_fpr_D(dc, rs1);
1762
    src2 = gen_load_fpr_D(dc, rs2);
1763
    src0 = gen_load_fpr_D(dc, rd);
1764
    dst = gen_dest_fpr_D();
1765

    
1766
    gen(dst, src0, src1, src2);
1767

    
1768
    gen_store_fpr_D(dc, rd, dst);
1769
}
1770
#endif
1771

    
1772
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

/* asi moves */
#ifdef TARGET_SPARC64
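/* In the immediate form the ASI comes from the %asi register; otherwise
   the 8-bit ASI field is taken from the instruction word.  */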
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}

#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
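/* LDSTUB is emulated as a load of the addressed byte followed by a store
   of all ones (0xff) to the same address.  */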
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif

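/* Integer operand fetch: %g0 always reads as zero, the remaining globals
   live in cpu_gregs[], and windowed registers are loaded through
   cpu_regwptr.  */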
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}

#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}

static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
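    /* For example, with width 8 (imask 0x7, shift 3, omask 0xff) an input
       whose low three bits are 2 gives index (2 & 0x7) << 3 == 16, so the
       left table below yields (0x80c0e0f0f8fcfeffULL >> 16) & 0xff == 0xfc.  */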
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
#endif

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;

/* before an instruction, dc->pc must be static */
2337
static void disas_sparc_insn(DisasContext * dc)
2338
{
2339
    unsigned int insn, opc, rs1, rs2, rd;
2340
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2341
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2342
    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2343
    target_long simm;
2344

    
2345
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2346
        tcg_gen_debug_insn_start(dc->pc);
2347
    insn = ldl_code(dc->pc);
2348
    opc = GET_FIELD(insn, 0, 1);
2349

    
2350
    rd = GET_FIELD(insn, 2, 6);
2351

    
2352
    cpu_tmp1 = cpu_src1 = tcg_temp_new();
2353
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
2354

    
2355
    switch (opc) {
2356
    case 0:                     /* branches/sethi */
2357
        {
2358
            unsigned int xop = GET_FIELD(insn, 7, 9);
2359
            int32_t target;
2360
            switch (xop) {
2361
#ifdef TARGET_SPARC64
2362
            case 0x1:           /* V9 BPcc */
2363
                {
2364
                    int cc;
2365

    
2366
                    target = GET_FIELD_SP(insn, 0, 18);
2367
                    target = sign_extend(target, 19);
2368
                    target <<= 2;
2369
                    cc = GET_FIELD_SP(insn, 20, 21);
2370
                    if (cc == 0)
2371
                        do_branch(dc, target, insn, 0, cpu_cond);
2372
                    else if (cc == 2)
2373
                        do_branch(dc, target, insn, 1, cpu_cond);
2374
                    else
2375
                        goto illegal_insn;
2376
                    goto jmp_insn;
2377
                }
2378
            case 0x3:           /* V9 BPr */
2379
                {
2380
                    target = GET_FIELD_SP(insn, 0, 13) |
2381
                        (GET_FIELD_SP(insn, 20, 21) << 14);
2382
                    target = sign_extend(target, 16);
2383
                    target <<= 2;
2384
                    cpu_src1 = get_src1(insn, cpu_src1);
2385
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2386
                    goto jmp_insn;
2387
                }
2388
            case 0x5:           /* V9 FBPcc */
2389
                {
2390
                    int cc = GET_FIELD_SP(insn, 20, 21);
2391
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2392
                        goto jmp_insn;
2393
                    target = GET_FIELD_SP(insn, 0, 18);
2394
                    target = sign_extend(target, 19);
2395
                    target <<= 2;
2396
                    do_fbranch(dc, target, insn, cc, cpu_cond);
2397
                    goto jmp_insn;
2398
                }
2399
#else
2400
            case 0x7:           /* CBN+x */
2401
                {
2402
                    goto ncp_insn;
2403
                }
2404
#endif
2405
            case 0x2:           /* BN+x */
2406
                {
2407
                    target = GET_FIELD(insn, 10, 31);
2408
                    target = sign_extend(target, 22);
2409
                    target <<= 2;
2410
                    do_branch(dc, target, insn, 0, cpu_cond);
2411
                    goto jmp_insn;
2412
                }
2413
            case 0x6:           /* FBN+x */
2414
                {
2415
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2416
                        goto jmp_insn;
2417
                    target = GET_FIELD(insn, 10, 31);
2418
                    target = sign_extend(target, 22);
2419
                    target <<= 2;
2420
                    do_fbranch(dc, target, insn, 0, cpu_cond);
2421
                    goto jmp_insn;
2422
                }
2423
            case 0x4:           /* SETHI */
2424
                if (rd) { // nop
2425
                    uint32_t value = GET_FIELD(insn, 10, 31);
2426
                    TCGv r_const;
2427

    
2428
                    r_const = tcg_const_tl(value << 10);
2429
                    gen_movl_TN_reg(rd, r_const);
2430
                    tcg_temp_free(r_const);
2431
                }
2432
                break;
2433
            case 0x0:           /* UNIMPL */
2434
            default:
2435
                goto illegal_insn;
2436
            }
2437
            break;
2438
        }
2439
        break;
2440
    case 1:                     /*CALL*/
2441
        {
2442
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
2443
            TCGv r_const;
2444

    
2445
            r_const = tcg_const_tl(dc->pc);
2446
            gen_movl_TN_reg(15, r_const);
2447
            tcg_temp_free(r_const);
2448
            target += dc->pc;
2449
            gen_mov_pc_npc(dc, cpu_cond);
2450
            dc->npc = target;
2451
        }
2452
        goto jmp_insn;
2453
    case 2:                     /* FPU & Logical Operations */
2454
        {
2455
            unsigned int xop = GET_FIELD(insn, 7, 12);
2456
            if (xop == 0x3a) {  /* generate trap */
2457
                int cond;
2458

    
2459
                cpu_src1 = get_src1(insn, cpu_src1);
2460
                if (IS_IMM) {
2461
                    rs2 = GET_FIELD(insn, 25, 31);
2462
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2463
                } else {
2464
                    rs2 = GET_FIELD(insn, 27, 31);
2465
                    if (rs2 != 0) {
2466
                        gen_movl_reg_TN(rs2, cpu_src2);
2467
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2468
                    } else
2469
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
2470
                }
2471

    
2472
                cond = GET_FIELD(insn, 3, 6);
2473
                if (cond == 0x8) { /* Trap Always */
2474
                    save_state(dc, cpu_cond);
2475
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
2476
                        supervisor(dc))
2477
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2478
                    else
2479
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2480
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2481
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2482

    
2483
                    if (rs2 == 0 &&
2484
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2485

    
2486
                        gen_helper_shutdown();
2487

    
2488
                    } else {
2489
                        gen_helper_raise_exception(cpu_env, cpu_tmp32);
2490
                    }
2491
                } else if (cond != 0) {
2492
                    TCGv r_cond = tcg_temp_new();
2493
                    int l1;
2494
#ifdef TARGET_SPARC64
2495
                    /* V9 icc/xcc */
2496
                    int cc = GET_FIELD_SP(insn, 11, 12);
2497

    
2498
                    save_state(dc, cpu_cond);
2499
                    if (cc == 0)
2500
                        gen_cond(r_cond, 0, cond, dc);
2501
                    else if (cc == 2)
2502
                        gen_cond(r_cond, 1, cond, dc);
2503
                    else
2504
                        goto illegal_insn;
2505
#else
2506
                    save_state(dc, cpu_cond);
2507
                    gen_cond(r_cond, 0, cond, dc);
2508
#endif
2509
                    l1 = gen_new_label();
2510
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2511

    
2512
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
2513
                        supervisor(dc))
2514
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2515
                    else
2516
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2517
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2518
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2519
                    gen_helper_raise_exception(cpu_env, cpu_tmp32);
2520

    
2521
                    gen_set_label(l1);
2522
                    tcg_temp_free(r_cond);
2523
                }
2524
                gen_op_next_insn();
2525
                tcg_gen_exit_tb(0);
2526
                dc->is_br = 1;
2527
                goto jmp_insn;
2528
            } else if (xop == 0x28) {
2529
                rs1 = GET_FIELD(insn, 13, 17);
2530
                switch(rs1) {
2531
                case 0: /* rdy */
2532
#ifndef TARGET_SPARC64
2533
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
2534
                                       manual, rdy on the microSPARC
2535
                                       II */
2536
                case 0x0f:          /* stbar in the SPARCv8 manual,
2537
                                       rdy on the microSPARC II */
2538
                case 0x10 ... 0x1f: /* implementation-dependent in the
2539
                                       SPARCv8 manual, rdy on the
2540
                                       microSPARC II */
2541
                    /* Read Asr17 */
2542
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2543
                        TCGv r_const;
2544

    
2545
                        /* Read Asr17 for a Leon3 monoprocessor */
2546
                        r_const = tcg_const_tl((1 << 8)
2547
                                               | (dc->def->nwindows - 1));
2548
                        gen_movl_TN_reg(rd, r_const);
2549
                        tcg_temp_free(r_const);
2550
                        break;
2551
                    }
2552
#endif
2553
                    gen_movl_TN_reg(rd, cpu_y);
2554
                    break;
2555
#ifdef TARGET_SPARC64
2556
                case 0x2: /* V9 rdccr */
2557
                    gen_helper_compute_psr(cpu_env);
2558
                    gen_helper_rdccr(cpu_dst, cpu_env);
2559
                    gen_movl_TN_reg(rd, cpu_dst);
2560
                    break;
2561
                case 0x3: /* V9 rdasi */
2562
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2563
                    gen_movl_TN_reg(rd, cpu_dst);
2564
                    break;
2565
                case 0x4: /* V9 rdtick */
2566
                    {
2567
                        TCGv_ptr r_tickptr;
2568

    
2569
                        r_tickptr = tcg_temp_new_ptr();
2570
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2571
                                       offsetof(CPUState, tick));
2572
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
2573
                        tcg_temp_free_ptr(r_tickptr);
2574
                        gen_movl_TN_reg(rd, cpu_dst);
2575
                    }
2576
                    break;
2577
                case 0x5: /* V9 rdpc */
2578
                    {
2579
                        TCGv r_const;
2580

    
2581
                        r_const = tcg_const_tl(dc->pc);
2582
                        gen_movl_TN_reg(rd, r_const);
2583
                        tcg_temp_free(r_const);
2584
                    }
2585
                    break;
2586
                case 0x6: /* V9 rdfprs */
2587
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2588
                    gen_movl_TN_reg(rd, cpu_dst);
2589
                    break;
2590
                case 0xf: /* V9 membar */
2591
                    break; /* no effect */
2592
                case 0x13: /* Graphics Status */
2593
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2594
                        goto jmp_insn;
2595
                    gen_movl_TN_reg(rd, cpu_gsr);
2596
                    break;
2597
                case 0x16: /* Softint */
2598
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2599
                    gen_movl_TN_reg(rd, cpu_dst);
2600
                    break;
2601
                case 0x17: /* Tick compare */
2602
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
2603
                    break;
2604
                case 0x18: /* System tick */
2605
                    {
2606
                        TCGv_ptr r_tickptr;
2607

    
2608
                        r_tickptr = tcg_temp_new_ptr();
2609
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2610
                                       offsetof(CPUState, stick));
2611
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
2612
                        tcg_temp_free_ptr(r_tickptr);
2613
                        gen_movl_TN_reg(rd, cpu_dst);
2614
                    }
2615
                    break;
2616
                case 0x19: /* System tick compare */
2617
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
2618
                    break;
2619
                case 0x10: /* Performance Control */
2620
                case 0x11: /* Performance Instrumentation Counter */
2621
                case 0x12: /* Dispatch Control */
2622
                case 0x14: /* Softint set, WO */
2623
                case 0x15: /* Softint clear, WO */
2624
#endif
2625
                default:
2626
                    goto illegal_insn;
2627
                }
2628
#if !defined(CONFIG_USER_ONLY)
2629
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2630
#ifndef TARGET_SPARC64
2631
                if (!supervisor(dc))
2632
                    goto priv_insn;
2633
                gen_helper_compute_psr(cpu_env);
2634
                dc->cc_op = CC_OP_FLAGS;
2635
                gen_helper_rdpsr(cpu_dst, cpu_env);
2636
#else
2637
                CHECK_IU_FEATURE(dc, HYPV);
2638
                if (!hypervisor(dc))
2639
                    goto priv_insn;
2640
                rs1 = GET_FIELD(insn, 13, 17);
2641
                switch (rs1) {
2642
                case 0: // hpstate
2643
                    // gen_op_rdhpstate();
2644
                    break;
2645
                case 1: // htstate
2646
                    // gen_op_rdhtstate();
2647
                    break;
2648
                case 3: // hintp
2649
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2650
                    break;
2651
                case 5: // htba
2652
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
2653
                    break;
2654
                case 6: // hver
2655
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
2656
                    break;
2657
                case 31: // hstick_cmpr
2658
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2659
                    break;
2660
                default:
2661
                    goto illegal_insn;
2662
                }
2663
#endif
2664
                gen_movl_TN_reg(rd, cpu_dst);
2665
                break;
2666
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2667
                if (!supervisor(dc))
2668
                    goto priv_insn;
2669
#ifdef TARGET_SPARC64
2670
                rs1 = GET_FIELD(insn, 13, 17);
2671
                switch (rs1) {
2672
                case 0: // tpc
2673
                    {
2674
                        TCGv_ptr r_tsptr;
2675

    
2676
                        r_tsptr = tcg_temp_new_ptr();
2677
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2678
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2679
                                      offsetof(trap_state, tpc));
2680
                        tcg_temp_free_ptr(r_tsptr);
2681
                    }
2682
                    break;
2683
                case 1: // tnpc
2684
                    {
2685
                        TCGv_ptr r_tsptr;
2686

    
2687
                        r_tsptr = tcg_temp_new_ptr();
2688
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2689
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2690
                                      offsetof(trap_state, tnpc));
2691
                        tcg_temp_free_ptr(r_tsptr);
2692
                    }
2693
                    break;
2694
                case 2: // tstate
2695
                    {
2696
                        TCGv_ptr r_tsptr;
2697

    
2698
                        r_tsptr = tcg_temp_new_ptr();
2699
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2700
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2701
                                      offsetof(trap_state, tstate));
2702
                        tcg_temp_free_ptr(r_tsptr);
2703
                    }
2704
                    break;
2705
                case 3: // tt
2706
                    {
2707
                        TCGv_ptr r_tsptr;
2708

    
2709
                        r_tsptr = tcg_temp_new_ptr();
2710
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2711
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2712
                                       offsetof(trap_state, tt));
2713
                        tcg_temp_free_ptr(r_tsptr);
2714
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2715
                    }
2716
                    break;
2717
                case 4: // tick
2718
                    {
2719
                        TCGv_ptr r_tickptr;
2720

    
2721
                        r_tickptr = tcg_temp_new_ptr();
2722
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2723
                                       offsetof(CPUState, tick));
2724
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2725
                        gen_movl_TN_reg(rd, cpu_tmp0);
2726
                        tcg_temp_free_ptr(r_tickptr);
2727
                    }
2728
                    break;
2729
                case 5: // tba
2730
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2731
                    break;
2732
                case 6: // pstate
2733
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2734
                                   offsetof(CPUSPARCState, pstate));
2735
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2736
                    break;
2737
                case 7: // tl
2738
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2739
                                   offsetof(CPUSPARCState, tl));
2740
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2741
                    break;
2742
                case 8: // pil
2743
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2744
                                   offsetof(CPUSPARCState, psrpil));
2745
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2746
                    break;
2747
                case 9: // cwp
2748
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
2749
                    break;
2750
                case 10: // cansave
2751
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2752
                                   offsetof(CPUSPARCState, cansave));
2753
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2754
                    break;
2755
                case 11: // canrestore
2756
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2757
                                   offsetof(CPUSPARCState, canrestore));
2758
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2759
                    break;
2760
                case 12: // cleanwin
2761
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2762
                                   offsetof(CPUSPARCState, cleanwin));
2763
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2764
                    break;
2765
                case 13: // otherwin
2766
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2767
                                   offsetof(CPUSPARCState, otherwin));
2768
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2769
                    break;
2770
                case 14: // wstate
2771
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2772
                                   offsetof(CPUSPARCState, wstate));
2773
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2774
                    break;
2775
                case 16: // UA2005 gl
2776
                    CHECK_IU_FEATURE(dc, GL);
2777
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2778
                                   offsetof(CPUSPARCState, gl));
2779
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2780
                    break;
2781
                case 26: // UA2005 strand status
2782
                    CHECK_IU_FEATURE(dc, HYPV);
2783
                    if (!hypervisor(dc))
2784
                        goto priv_insn;
2785
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2786
                    break;
2787
                case 31: // ver
2788
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2789
                    break;
2790
                case 15: // fq
2791
                default:
2792
                    goto illegal_insn;
2793
                }
2794
#else
2795
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2796
#endif
2797
                gen_movl_TN_reg(rd, cpu_tmp0);
2798
                break;
2799
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2800
#ifdef TARGET_SPARC64
2801
                save_state(dc, cpu_cond);
2802
                gen_helper_flushw(cpu_env);
2803
#else
2804
                if (!supervisor(dc))
2805
                    goto priv_insn;
2806
                gen_movl_TN_reg(rd, cpu_tbr);
2807
#endif
2808
                break;
2809
#endif
2810
            } else if (xop == 0x34) {   /* FPU Operations */
2811
                if (gen_trap_ifnofpu(dc, cpu_cond))
2812
                    goto jmp_insn;
2813
                gen_op_clear_ieee_excp_and_FTT();
2814
                rs1 = GET_FIELD(insn, 13, 17);
2815
                rs2 = GET_FIELD(insn, 27, 31);
2816
                xop = GET_FIELD(insn, 18, 26);
2817
                save_state(dc, cpu_cond);
2818
                switch (xop) {
2819
                case 0x1: /* fmovs */
2820
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2821
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
2822
                    break;
2823
                case 0x5: /* fnegs */
2824
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2825
                    break;
2826
                case 0x9: /* fabss */
2827
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2828
                    break;
2829
                case 0x29: /* fsqrts */
2830
                    CHECK_FPU_FEATURE(dc, FSQRT);
2831
                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2832
                    break;
2833
                case 0x2a: /* fsqrtd */
2834
                    CHECK_FPU_FEATURE(dc, FSQRT);
2835
                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2836
                    break;
2837
                case 0x2b: /* fsqrtq */
2838
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2839
                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2840
                    break;
2841
                case 0x41: /* fadds */
2842
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2843
                    break;
2844
                case 0x42: /* faddd */
2845
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2846
                    break;
2847
                case 0x43: /* faddq */
2848
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2849
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2850
                    break;
2851
                case 0x45: /* fsubs */
2852
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2853
                    break;
2854
                case 0x46: /* fsubd */
2855
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2856
                    break;
2857
                case 0x47: /* fsubq */
2858
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2859
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2860
                    break;
2861
                case 0x49: /* fmuls */
2862
                    CHECK_FPU_FEATURE(dc, FMUL);
2863
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2864
                    break;
2865
                case 0x4a: /* fmuld */
2866
                    CHECK_FPU_FEATURE(dc, FMUL);
2867
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2868
                    break;
2869
                case 0x4b: /* fmulq */
2870
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2871
                    CHECK_FPU_FEATURE(dc, FMUL);
2872
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2873
                    break;
2874
                case 0x4d: /* fdivs */
2875
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2876
                    break;
2877
                case 0x4e: /* fdivd */
2878
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2879
                    break;
2880
                case 0x4f: /* fdivq */
2881
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2882
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2883
                    break;
2884
                case 0x69: /* fsmuld */
2885
                    CHECK_FPU_FEATURE(dc, FSMULD);
2886
                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2887
                    break;
2888
                case 0x6e: /* fdmulq */
2889
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2890
                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2891
                    break;
2892
                case 0xc4: /* fitos */
2893
                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2894
                    break;
2895
                case 0xc6: /* fdtos */
2896
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2897
                    break;
2898
                case 0xc7: /* fqtos */
2899
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2900
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2901
                    break;
2902
                case 0xc8: /* fitod */
2903
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2904
                    break;
2905
                case 0xc9: /* fstod */
2906
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2907
                    break;
2908
                case 0xcb: /* fqtod */
2909
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2910
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2911
                    break;
2912
                case 0xcc: /* fitoq */
2913
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2914
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2915
                    break;
2916
                case 0xcd: /* fstoq */
2917
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2918
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2919
                    break;
2920
                case 0xce: /* fdtoq */
2921
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2922
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2923
                    break;
2924
                case 0xd1: /* fstoi */
2925
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2926
                    break;
2927
                case 0xd2: /* fdtoi */
2928
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2929
                    break;
2930
                case 0xd3: /* fqtoi */
2931
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2932
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2933
                    break;
2934
#ifdef TARGET_SPARC64
2935
                case 0x2: /* V9 fmovd */
2936
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2937
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
2938
                    break;
2939
                case 0x3: /* V9 fmovq */
2940
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2941
                    gen_move_Q(rd, rs2);
2942
                    break;
2943
                case 0x6: /* V9 fnegd */
2944
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
2945
                    break;
2946
                case 0x7: /* V9 fnegq */
2947
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2948
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
2949
                    break;
2950
                case 0xa: /* V9 fabsd */
2951
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
2952
                    break;
2953
                case 0xb: /* V9 fabsq */
2954
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2955
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
2956
                    break;
2957
                case 0x81: /* V9 fstox */
2958
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
2959
                    break;
2960
                case 0x82: /* V9 fdtox */
2961
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
2962
                    break;
2963
                case 0x83: /* V9 fqtox */
2964
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2965
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
2966
                    break;
2967
                case 0x84: /* V9 fxtos */
2968
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
2969
                    break;
2970
                case 0x88: /* V9 fxtod */
2971
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
2972
                    break;
2973
                case 0x8c: /* V9 fxtoq */
2974
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2975
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
2976
                    break;
2977
#endif
2978
                default:
2979
                    goto illegal_insn;
2980
                }
2981
            } else if (xop == 0x35) {   /* FPU Operations */
2982
#ifdef TARGET_SPARC64
2983
                int cond;
2984
#endif
2985
                if (gen_trap_ifnofpu(dc, cpu_cond))
2986
                    goto jmp_insn;
2987
                gen_op_clear_ieee_excp_and_FTT();
2988
                rs1 = GET_FIELD(insn, 13, 17);
2989
                rs2 = GET_FIELD(insn, 27, 31);
2990
                xop = GET_FIELD(insn, 18, 26);
2991
                save_state(dc, cpu_cond);
2992
#ifdef TARGET_SPARC64
2993
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2994
                    int l1;
2995

    
2996
                    l1 = gen_new_label();
2997
                    cond = GET_FIELD_SP(insn, 14, 17);
2998
                    cpu_src1 = get_src1(insn, cpu_src1);
2999
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3000
                                       0, l1);
3001
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3002
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
3003
                    gen_set_label(l1);
3004
                    break;
3005
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3006
                    int l1;
3007

    
3008
                    l1 = gen_new_label();
3009
                    cond = GET_FIELD_SP(insn, 14, 17);
3010
                    cpu_src1 = get_src1(insn, cpu_src1);
3011
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3012
                                       0, l1);
3013
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3014
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
3015
                    gen_set_label(l1);
3016
                    break;
3017
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3018
                    int l1;
3019

    
3020
                    CHECK_FPU_FEATURE(dc, FLOAT128);
3021
                    l1 = gen_new_label();
3022
                    cond = GET_FIELD_SP(insn, 14, 17);
3023
                    cpu_src1 = get_src1(insn, cpu_src1);
3024
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3025
                                       0, l1);
3026
                    gen_move_Q(rd, rs2);
3027
                    gen_set_label(l1);
3028
                    break;
3029
                }
3030
#endif
3031
                switch (xop) {
3032
#ifdef TARGET_SPARC64
3033
#define FMOVSCC(fcc)                                                    \
3034
                    {                                                   \
3035
                        TCGv r_cond;                                    \
3036
                        int l1;                                         \
3037
                                                                        \
3038
                        l1 = gen_new_label();                           \
3039
                        r_cond = tcg_temp_new();                        \
3040
                        cond = GET_FIELD_SP(insn, 14, 17);              \
3041
                        gen_fcond(r_cond, fcc, cond);                   \
3042
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
3043
                                           0, l1);                      \
3044
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
3045
                        gen_store_fpr_F(dc, rd, cpu_src1_32);           \
3046
                        gen_set_label(l1);                              \
3047
                        tcg_temp_free(r_cond);                          \
3048
                    }
3049
#define FMOVDCC(fcc)                                                    \
3050
                    {                                                   \
3051
                        TCGv r_cond;                                    \
3052
                        int l1;                                         \
3053
                                                                        \
3054
                        l1 = gen_new_label();                           \
3055
                        r_cond = tcg_temp_new();                        \
3056
                        cond = GET_FIELD_SP(insn, 14, 17);              \
3057
                        gen_fcond(r_cond, fcc, cond);                   \
3058
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
3059
                                           0, l1);                      \
3060
                        cpu_src1_64 = gen_load_fpr_D(dc, rs2);          \
3061
                        gen_store_fpr_D(dc, rd, cpu_src1_64);           \
3062
                        gen_set_label(l1);                              \
3063
                        tcg_temp_free(r_cond);                          \
3064
                    }
3065
#define FMOVQCC(fcc)                                                    \
3066
                    {                                                   \
3067
                        TCGv r_cond;                                    \
3068
                        int l1;                                         \
3069
                                                                        \
3070
                        l1 = gen_new_label();                           \
3071
                        r_cond = tcg_temp_new();                        \
3072
                        cond = GET_FIELD_SP(insn, 14, 17);              \
3073
                        gen_fcond(r_cond, fcc, cond);                   \
3074
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
3075
                                           0, l1);                      \
3076
                        gen_move_Q(rd, rs2);                            \
3077
                        gen_set_label(l1);                              \
3078
                        tcg_temp_free(r_cond);                          \
3079
                    }
3080
                    case 0x001: /* V9 fmovscc %fcc0 */
3081
                        FMOVSCC(0);
3082
                        break;
3083
                    case 0x002: /* V9 fmovdcc %fcc0 */
3084
                        FMOVDCC(0);
3085
                        break;
3086
                    case 0x003: /* V9 fmovqcc %fcc0 */
3087
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3088
                        FMOVQCC(0);
3089
                        break;
3090
                    case 0x041: /* V9 fmovscc %fcc1 */
3091
                        FMOVSCC(1);
3092
                        break;
3093
                    case 0x042: /* V9 fmovdcc %fcc1 */
3094
                        FMOVDCC(1);
3095
                        break;
3096
                    case 0x043: /* V9 fmovqcc %fcc1 */
3097
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3098
                        FMOVQCC(1);
3099
                        break;
3100
                    case 0x081: /* V9 fmovscc %fcc2 */
3101
                        FMOVSCC(2);
3102
                        break;
3103
                    case 0x082: /* V9 fmovdcc %fcc2 */
3104
                        FMOVDCC(2);
3105
                        break;
3106
                    case 0x083: /* V9 fmovqcc %fcc2 */
3107
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3108
                        FMOVQCC(2);
3109
                        break;
3110
                    case 0x0c1: /* V9 fmovscc %fcc3 */
3111
                        FMOVSCC(3);
3112
                        break;
3113
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
3114
                        FMOVDCC(3);
3115
                        break;
3116
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
3117
                        CHECK_FPU_FEATURE(dc, FLOAT128);
3118
                        FMOVQCC(3);
3119
                        break;
3120
#undef FMOVSCC
3121
#undef FMOVDCC
3122
#undef FMOVQCC
3123
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
                        gen_store_fpr_F(dc, rd, cpu_src1_32);           \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_64 = gen_load_fpr_D(dc, rs2);          \
                        gen_store_fpr_D(dc, rd, cpu_src1_64);           \
                        gen_update_fprs_dirty(DFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        gen_move_Q(rd, rs2);                            \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
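                    /* For xop < 0x20, bit 4 of xop (0x10) selects the
                       condition-code-setting variant of the ALU operation. */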
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        {
                            TCGv r_temp1, r_temp2;
                            r_temp1 = tcg_temp_local_new();
                            r_temp2 = tcg_temp_local_new();
                            tcg_gen_mov_tl(r_temp1, cpu_src1);
                            tcg_gen_mov_tl(r_temp2, cpu_src2);
                            gen_trap_ifdivzero_tl(r_temp2);
                            tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
                            tcg_temp_free(r_temp1);
                            tcg_temp_free(r_temp2);
                        }
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr(cpu_env);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
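                    /* The write-state-register forms (xop 0x30-0x33): the
                       value written is rs1 XOR (rs2 or the sign-extended
                       immediate), and on V9 the rd field selects the
                       specific ASR or privileged register. */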
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    gen_helper_wrpstate(cpu_env, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                              cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
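                /* VIS edge instructions: the trailing arguments of gen_edge()
                   select the element width (8, 16 or 32), the condition-code
                   setting forms ("cc") and the little-endian forms ("l"). */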
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
4206
                    CHECK_FPU_FEATURE(dc, VIS1);
4207
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4208
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4209
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4210
                    gen_movl_TN_reg(rd, cpu_dst);
4211
                    break;
4212
                case 0x02a: /* VIS I fcmpeq16 */
4213
                    CHECK_FPU_FEATURE(dc, VIS1);
4214
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4215
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4216
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4217
                    gen_movl_TN_reg(rd, cpu_dst);
4218
                    break;
4219
                case 0x02c: /* VIS I fcmpgt32 */
4220
                    CHECK_FPU_FEATURE(dc, VIS1);
4221
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4222
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4223
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4224
                    gen_movl_TN_reg(rd, cpu_dst);
4225
                    break;
4226
                case 0x02e: /* VIS I fcmpeq32 */
4227
                    CHECK_FPU_FEATURE(dc, VIS1);
4228
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4229
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4230
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4231
                    gen_movl_TN_reg(rd, cpu_dst);
4232
                    break;
4233
                case 0x031: /* VIS I fmul8x16 */
4234
                    CHECK_FPU_FEATURE(dc, VIS1);
4235
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4236
                    break;
4237
                case 0x033: /* VIS I fmul8x16au */
4238
                    CHECK_FPU_FEATURE(dc, VIS1);
4239
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4240
                    break;
4241
                case 0x035: /* VIS I fmul8x16al */
4242
                    CHECK_FPU_FEATURE(dc, VIS1);
4243
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4244
                    break;
4245
                case 0x036: /* VIS I fmul8sux16 */
4246
                    CHECK_FPU_FEATURE(dc, VIS1);
4247
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4248
                    break;
4249
                case 0x037: /* VIS I fmul8ulx16 */
4250
                    CHECK_FPU_FEATURE(dc, VIS1);
4251
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4252
                    break;
4253
                case 0x038: /* VIS I fmuld8sux16 */
4254
                    CHECK_FPU_FEATURE(dc, VIS1);
4255
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4256
                    break;
4257
                case 0x039: /* VIS I fmuld8ulx16 */
4258
                    CHECK_FPU_FEATURE(dc, VIS1);
4259
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4260
                    break;
4261
                case 0x03a: /* VIS I fpack32 */
4262
                    CHECK_FPU_FEATURE(dc, VIS1);
4263
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4264
                    break;
4265
                case 0x03b: /* VIS I fpack16 */
4266
                    CHECK_FPU_FEATURE(dc, VIS1);
4267
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4268
                    cpu_dst_32 = gen_dest_fpr_F();
4269
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4270
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4271
                    break;
4272
                case 0x03d: /* VIS I fpackfix */
4273
                    CHECK_FPU_FEATURE(dc, VIS1);
4274
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4275
                    cpu_dst_32 = gen_dest_fpr_F();
4276
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4277
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4278
                    break;
4279
                case 0x03e: /* VIS I pdist */
4280
                    CHECK_FPU_FEATURE(dc, VIS1);
4281
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4282
                    break;
4283
                case 0x048: /* VIS I faligndata */
4284
                    CHECK_FPU_FEATURE(dc, VIS1);
4285
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4286
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4287
                    cpu_dst_64 = gen_dest_fpr_D();
4288
                    gen_helper_faligndata(cpu_dst_64, cpu_env,
4289
                                          cpu_src1_64, cpu_src2_64);
4290
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4291
                    break;
4292
                case 0x04b: /* VIS I fpmerge */
4293
                    CHECK_FPU_FEATURE(dc, VIS1);
4294
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4295
                    break;
4296
                case 0x04c: /* VIS II bshuffle */
4297
                    // XXX
4298
                    goto illegal_insn;
4299
                case 0x04d: /* VIS I fexpand */
4300
                    CHECK_FPU_FEATURE(dc, VIS1);
4301
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4302
                    break;
4303
                case 0x050: /* VIS I fpadd16 */
4304
                    CHECK_FPU_FEATURE(dc, VIS1);
4305
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4306
                    break;
4307
                case 0x051: /* VIS I fpadd16s */
4308
                    CHECK_FPU_FEATURE(dc, VIS1);
4309
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4310
                    break;
4311
                case 0x052: /* VIS I fpadd32 */
4312
                    CHECK_FPU_FEATURE(dc, VIS1);
4313
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4314
                    break;
4315
                case 0x053: /* VIS I fpadd32s */
4316
                    CHECK_FPU_FEATURE(dc, VIS1);
4317
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4318
                    break;
4319
                case 0x054: /* VIS I fpsub16 */
4320
                    CHECK_FPU_FEATURE(dc, VIS1);
4321
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4322
                    break;
4323
                case 0x055: /* VIS I fpsub16s */
4324
                    CHECK_FPU_FEATURE(dc, VIS1);
4325
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4326
                    break;
4327
                case 0x056: /* VIS I fpsub32 */
4328
                    CHECK_FPU_FEATURE(dc, VIS1);
4329
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4330
                    break;
4331
                case 0x057: /* VIS I fpsub32s */
4332
                    CHECK_FPU_FEATURE(dc, VIS1);
4333
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4334
                    break;
4335
                case 0x060: /* VIS I fzero */
4336
                    CHECK_FPU_FEATURE(dc, VIS1);
4337
                    cpu_dst_64 = gen_dest_fpr_D();
4338
                    tcg_gen_movi_i64(cpu_dst_64, 0);
4339
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4340
                    break;
4341
                case 0x061: /* VIS I fzeros */
4342
                    CHECK_FPU_FEATURE(dc, VIS1);
4343
                    cpu_dst_32 = gen_dest_fpr_F();
4344
                    tcg_gen_movi_i32(cpu_dst_32, 0);
4345
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4346
                    break;
4347
                case 0x062: /* VIS I fnor */
4348
                    CHECK_FPU_FEATURE(dc, VIS1);
4349
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4350
                    break;
4351
                case 0x063: /* VIS I fnors */
4352
                    CHECK_FPU_FEATURE(dc, VIS1);
4353
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4354
                    break;
4355
                case 0x064: /* VIS I fandnot2 */
4356
                    CHECK_FPU_FEATURE(dc, VIS1);
4357
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4358
                    break;
4359
                case 0x065: /* VIS I fandnot2s */
4360
                    CHECK_FPU_FEATURE(dc, VIS1);
4361
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4362
                    break;
4363
                case 0x066: /* VIS I fnot2 */
4364
                    CHECK_FPU_FEATURE(dc, VIS1);
4365
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4366
                    break;
4367
                case 0x067: /* VIS I fnot2s */
4368
                    CHECK_FPU_FEATURE(dc, VIS1);
4369
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4370
                    break;
4371
                case 0x068: /* VIS I fandnot1 */
4372
                    CHECK_FPU_FEATURE(dc, VIS1);
4373
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4374
                    break;
4375
                case 0x069: /* VIS I fandnot1s */
4376
                    CHECK_FPU_FEATURE(dc, VIS1);
4377
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4378
                    break;
4379
                case 0x06a: /* VIS I fnot1 */
4380
                    CHECK_FPU_FEATURE(dc, VIS1);
4381
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4382
                    break;
4383
                case 0x06b: /* VIS I fnot1s */
4384
                    CHECK_FPU_FEATURE(dc, VIS1);
4385
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4386
                    break;
4387
                case 0x06c: /* VIS I fxor */
4388
                    CHECK_FPU_FEATURE(dc, VIS1);
4389
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4390
                    break;
4391
                case 0x06d: /* VIS I fxors */
4392
                    CHECK_FPU_FEATURE(dc, VIS1);
4393
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4394
                    break;
4395
                case 0x06e: /* VIS I fnand */
4396
                    CHECK_FPU_FEATURE(dc, VIS1);
4397
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4398
                    break;
4399
                case 0x06f: /* VIS I fnands */
4400
                    CHECK_FPU_FEATURE(dc, VIS1);
4401
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4402
                    break;
4403
                case 0x070: /* VIS I fand */
4404
                    CHECK_FPU_FEATURE(dc, VIS1);
4405
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4406
                    break;
4407
                case 0x071: /* VIS I fands */
4408
                    CHECK_FPU_FEATURE(dc, VIS1);
4409
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4410
                    break;
4411
                case 0x072: /* VIS I fxnor */
4412
                    CHECK_FPU_FEATURE(dc, VIS1);
4413
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4414
                    break;
4415
                case 0x073: /* VIS I fxnors */
4416
                    CHECK_FPU_FEATURE(dc, VIS1);
4417
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4418
                    break;
4419
                case 0x074: /* VIS I fsrc1 */
4420
                    CHECK_FPU_FEATURE(dc, VIS1);
4421
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4422
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
4423
                    break;
4424
                case 0x075: /* VIS I fsrc1s */
4425
                    CHECK_FPU_FEATURE(dc, VIS1);
4426
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4427
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
4428
                    break;
4429
                case 0x076: /* VIS I fornot2 */
4430
                    CHECK_FPU_FEATURE(dc, VIS1);
4431
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4432
                    break;
4433
                case 0x077: /* VIS I fornot2s */
4434
                    CHECK_FPU_FEATURE(dc, VIS1);
4435
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4436
                    break;
4437
                case 0x078: /* VIS I fsrc2 */
4438
                    CHECK_FPU_FEATURE(dc, VIS1);
4439
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4440
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
4441
                    break;
4442
                case 0x079: /* VIS I fsrc2s */
4443
                    CHECK_FPU_FEATURE(dc, VIS1);
4444
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4445
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
4446
                    break;
4447
                case 0x07a: /* VIS I fornot1 */
4448
                    CHECK_FPU_FEATURE(dc, VIS1);
4449
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4450
                    break;
4451
                case 0x07b: /* VIS I fornot1s */
4452
                    CHECK_FPU_FEATURE(dc, VIS1);
4453
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4454
                    break;
4455
                case 0x07c: /* VIS I for */
4456
                    CHECK_FPU_FEATURE(dc, VIS1);
4457
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4458
                    break;
4459
                case 0x07d: /* VIS I fors */
4460
                    CHECK_FPU_FEATURE(dc, VIS1);
4461
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4462
                    break;
4463
                case 0x07e: /* VIS I fone */
4464
                    CHECK_FPU_FEATURE(dc, VIS1);
4465
                    cpu_dst_64 = gen_dest_fpr_D();
4466
                    tcg_gen_movi_i64(cpu_dst_64, -1);
4467
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
4468
                    break;
4469
                case 0x07f: /* VIS I fones */
4470
                    CHECK_FPU_FEATURE(dc, VIS1);
4471
                    cpu_dst_32 = gen_dest_fpr_F();
4472
                    tcg_gen_movi_i32(cpu_dst_32, -1);
4473
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4474
                    break;
4475
                case 0x080: /* VIS I shutdown */
4476
                case 0x081: /* VIS II siam */
4477
                    // XXX
4478
                    goto illegal_insn;
4479
                default:
4480
                    goto illegal_insn;
4481
                }
4482
#else
4483
                goto ncp_insn;
4484
#endif
4485
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore(cpu_env);
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
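    /* Load/store decode below: the effective address is computed once into
       cpu_addr (rs1 + simm13 for the immediate form, rs1 + rs2 otherwise,
       and rs1 alone for casa/casxa), then the access is dispatched on xop. */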
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr(cpu_env);
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    gen_update_fprs_dirty(rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_env, cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                        gen_update_fprs_dirty(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
                    tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        if (gen_trap_ifnofpu(dc, cpu_cond)) {
                            goto jmp_insn;
                        }
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
    if (dc->n_t32 != 0) {
        int i;
        for (i = dc->n_t32 - 1; i >= 0; --i) {
            tcg_temp_free_i32(dc->t32[i]);
        }
        dc->n_t32 = 0;
    }
}

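/* Translate one translation block: decode guest instructions starting at
   tb->pc until a branch is emitted, a breakpoint or page boundary is hit,
   single-step is requested, or the opcode buffer / max_insns limit is
   reached.  When spc != 0 the gen_opc_* tables are filled for PC search. */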
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

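/* One-time initialization: create the TCG globals that mirror CPUState
   fields (condition codes, PC/NPC, %y, the global registers and the
   double-precision FP registers) so generated code can reference them. */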
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

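/* Restore pc/npc from the gen_opc_* side tables after a fault.  An npc of
   1 means the NPC was dynamic and is already in the CPU state; 2 means a
   conditional jump, so the target is picked from gen_opc_jump_pc based on
   'cond'.  Pending lazy condition-code evaluations are flushed as well. */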
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr(env);
    }
}