target-sparc/translate.c @ 208ae657

1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "disas.h"
29
#include "helper.h"
30
#include "tcg-op.h"
31

    
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
#define DEBUG_DISAS
36

    
37
#define DYNAMIC_PC  1 /* dynamic pc value */
38
#define JUMP_PC     2 /* dynamic pc value which takes only two values
39
                         held in jump_pc[0] and jump_pc[1] */
40

    
41
/* global register indexes */
42
static TCGv_ptr cpu_env, cpu_regwptr;
43
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44
static TCGv_i32 cpu_cc_op;
45
static TCGv_i32 cpu_psr;
46
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47
static TCGv cpu_y;
48
#ifndef CONFIG_USER_ONLY
49
static TCGv cpu_tbr;
50
#endif
51
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52
#ifdef TARGET_SPARC64
53
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54
static TCGv cpu_gsr;
55
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57
static TCGv_i32 cpu_softint;
58
#else
59
static TCGv cpu_wim;
60
#endif
61
/* local register indexes (only used inside old micro ops) */
62
static TCGv cpu_tmp0;
63
static TCGv_i32 cpu_tmp32;
64
static TCGv_i64 cpu_tmp64;
65
/* Floating point registers */
66
static TCGv_i32 cpu__fpr[TARGET_FPREGS];
67

    
68
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69
static target_ulong gen_opc_jump_pc[2];
70

    
71
#include "gen-icount.h"
72

    
73
typedef struct DisasContext {
74
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76
    target_ulong jump_pc[2]; /* the two candidate npc values when npc == JUMP_PC */
77
    int is_br;
78
    int mem_idx;
79
    int fpu_enabled;
80
    int address_mask_32bit;
81
    int singlestep;
82
    uint32_t cc_op;  /* current CC operation */
83
    struct TranslationBlock *tb;
84
    sparc_def_t *def;
85
} DisasContext;
86

    
87
// This macro uses non-native bit order (bit 0 is the most significant bit)
88
#define GET_FIELD(X, FROM, TO)                                  \
89
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90

    
91
// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
92
#define GET_FIELD_SP(X, FROM, TO)               \
93
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
94

    
95
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
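
/* Illustrative example (not from the original source): the 30-bit CALL
   displacement occupies manual bits 29..0, i.e. GET_FIELD_SP(insn, 0, 29),
   which expands to GET_FIELD(insn, 2, 31) == insn & 0x3fffffff. */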
97

    
98
#ifdef TARGET_SPARC64
99
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101
#else
102
#define DFPREG(r) (r & 0x1e)
103
#define QFPREG(r) (r & 0x1c)
104
#endif
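
/* Example (V9 encoding): for double regs above %f30, bit 0 of the register
   field supplies bit 5 of the register number, so DFPREG(1) == 32 (%f32). */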
105

    
106
#define UA2005_HTRAP_MASK 0xff
107
#define V8_TRAP_MASK 0x7f
108

    
109
static int sign_extend(int x, int len)
110
{
111
    len = 32 - len;
112
    return (x << len) >> len;
113
}
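
/* e.g. sign_extend(0x1fff, 13) == -1, as needed for simm13 operands
   extracted with the GET_FIELDs/GET_FIELD_SPs macros above. */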
114

    
115
#define IS_IMM (insn & (1<<13))
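/* bit 13 is the 'i' field of format-3 instructions: 1 selects the signed
   13-bit immediate, 0 selects the rs2 register operand */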
116

    
117
/* floating point register moves */
118
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
119
{
120
    return cpu__fpr[src];
121
}
122

    
123
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
124
{
125
    tcg_gen_mov_i32 (cpu__fpr[dst], v);
126
}
127

    
128
static TCGv_i32 gen_dest_fpr_F(void)
129
{
130
    return cpu_tmp32;
131
}
132

    
133
static void gen_op_load_fpr_DT0(unsigned int src)
134
{
135
    tcg_gen_st_i32(cpu__fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
136
                   offsetof(CPU_DoubleU, l.upper));
137
    tcg_gen_st_i32(cpu__fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
138
                   offsetof(CPU_DoubleU, l.lower));
139
}
140

    
141
static void gen_op_load_fpr_DT1(unsigned int src)
142
{
143
    tcg_gen_st_i32(cpu__fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
144
                   offsetof(CPU_DoubleU, l.upper));
145
    tcg_gen_st_i32(cpu__fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
146
                   offsetof(CPU_DoubleU, l.lower));
147
}
148

    
149
static void gen_op_store_DT0_fpr(unsigned int dst)
150
{
151
    tcg_gen_ld_i32(cpu__fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
152
                   offsetof(CPU_DoubleU, l.upper));
153
    tcg_gen_ld_i32(cpu__fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
154
                   offsetof(CPU_DoubleU, l.lower));
155
}
156

    
157
static void gen_op_load_fpr_QT0(unsigned int src)
158
{
159
    tcg_gen_st_i32(cpu__fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
160
                   offsetof(CPU_QuadU, l.upmost));
161
    tcg_gen_st_i32(cpu__fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
162
                   offsetof(CPU_QuadU, l.upper));
163
    tcg_gen_st_i32(cpu__fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
164
                   offsetof(CPU_QuadU, l.lower));
165
    tcg_gen_st_i32(cpu__fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
166
                   offsetof(CPU_QuadU, l.lowest));
167
}
168

    
169
static void gen_op_load_fpr_QT1(unsigned int src)
170
{
171
    tcg_gen_st_i32(cpu__fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
172
                   offsetof(CPU_QuadU, l.upmost));
173
    tcg_gen_st_i32(cpu__fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
174
                   offsetof(CPU_QuadU, l.upper));
175
    tcg_gen_st_i32(cpu__fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
176
                   offsetof(CPU_QuadU, l.lower));
177
    tcg_gen_st_i32(cpu__fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
178
                   offsetof(CPU_QuadU, l.lowest));
179
}
180

    
181
static void gen_op_store_QT0_fpr(unsigned int dst)
182
{
183
    tcg_gen_ld_i32(cpu__fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
184
                   offsetof(CPU_QuadU, l.upmost));
185
    tcg_gen_ld_i32(cpu__fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
186
                   offsetof(CPU_QuadU, l.upper));
187
    tcg_gen_ld_i32(cpu__fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
188
                   offsetof(CPU_QuadU, l.lower));
189
    tcg_gen_ld_i32(cpu__fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
190
                   offsetof(CPU_QuadU, l.lowest));
191
}
192

    
193
/* moves */
194
#ifdef CONFIG_USER_ONLY
195
#define supervisor(dc) 0
196
#ifdef TARGET_SPARC64
197
#define hypervisor(dc) 0
198
#endif
199
#else
200
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
201
#ifdef TARGET_SPARC64
202
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
203
#else
204
#endif
205
#endif
206

    
207
#ifdef TARGET_SPARC64
208
#ifndef TARGET_ABI32
209
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
210
#else
211
#define AM_CHECK(dc) (1)
212
#endif
213
#endif
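
/* With PSTATE.AM set (or always for the 32-bit ABI), V9 virtual addresses
   are truncated to 32 bits; gen_address_mask below applies that mask. */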
214

    
215
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
216
{
217
#ifdef TARGET_SPARC64
218
    if (AM_CHECK(dc))
219
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
220
#endif
221
}
222

    
223
static inline void gen_movl_reg_TN(int reg, TCGv tn)
224
{
225
    if (reg == 0)
226
        tcg_gen_movi_tl(tn, 0);
227
    else if (reg < 8)
228
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
229
    else {
230
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
231
    }
232
}
233

    
234
static inline void gen_movl_TN_reg(int reg, TCGv tn)
235
{
236
    if (reg == 0)
237
        return;
238
    else if (reg < 8)
239
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
240
    else {
241
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
242
    }
243
}
244

    
245
static inline void gen_goto_tb(DisasContext *s, int tb_num,
246
                               target_ulong pc, target_ulong npc)
247
{
248
    TranslationBlock *tb;
249

    
250
    tb = s->tb;
251
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
252
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
253
        !s->singlestep)  {
254
        /* jump to same page: we can use a direct jump */
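        /* The (tb + tb_num) exit status tells the execution loop which of
           this TB's two goto_tb slots to patch when chaining (standard TCG
           direct-jump protocol). */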
255
        tcg_gen_goto_tb(tb_num);
256
        tcg_gen_movi_tl(cpu_pc, pc);
257
        tcg_gen_movi_tl(cpu_npc, npc);
258
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
259
    } else {
260
        /* jump to another page: currently not optimized */
261
        tcg_gen_movi_tl(cpu_pc, pc);
262
        tcg_gen_movi_tl(cpu_npc, npc);
263
        tcg_gen_exit_tb(0);
264
    }
265
}
266

    
267
// XXX suboptimal
268
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
269
{
270
    tcg_gen_extu_i32_tl(reg, src);
271
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
272
    tcg_gen_andi_tl(reg, reg, 0x1);
273
}
274

    
275
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
276
{
277
    tcg_gen_extu_i32_tl(reg, src);
278
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
279
    tcg_gen_andi_tl(reg, reg, 0x1);
280
}
281

    
282
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
283
{
284
    tcg_gen_extu_i32_tl(reg, src);
285
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
286
    tcg_gen_andi_tl(reg, reg, 0x1);
287
}
288

    
289
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
290
{
291
    tcg_gen_extu_i32_tl(reg, src);
292
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
293
    tcg_gen_andi_tl(reg, reg, 0x1);
294
}
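
/* For reference: the V8 PSR integer condition codes live in bits 23..20
   (N=23, Z=22, V=21, C=20), which is what the PSR_*_SHIFT constants used
   above expand to. */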
295

    
296
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
297
{
298
    TCGv r_temp;
299
    TCGv_i32 r_const;
300
    int l1;
301

    
302
    l1 = gen_new_label();
303

    
304
    r_temp = tcg_temp_new();
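    /* Signed overflow on add: the operands have equal sign bits and the
       result's sign differs (checked on bit 31 below). */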
305
    tcg_gen_xor_tl(r_temp, src1, src2);
306
    tcg_gen_not_tl(r_temp, r_temp);
307
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
308
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
309
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
310
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
311
    r_const = tcg_const_i32(TT_TOVF);
312
    gen_helper_raise_exception(cpu_env, r_const);
313
    tcg_temp_free_i32(r_const);
314
    gen_set_label(l1);
315
    tcg_temp_free(r_temp);
316
}
317

    
318
static inline void gen_tag_tv(TCGv src1, TCGv src2)
319
{
320
    int l1;
321
    TCGv_i32 r_const;
322

    
323
    l1 = gen_new_label();
324
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
325
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
326
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
327
    r_const = tcg_const_i32(TT_TOVF);
328
    gen_helper_raise_exception(cpu_env, r_const);
329
    tcg_temp_free_i32(r_const);
330
    gen_set_label(l1);
331
}
332

    
333
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
334
{
335
    tcg_gen_mov_tl(cpu_cc_src, src1);
336
    tcg_gen_movi_tl(cpu_cc_src2, src2);
337
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
338
    tcg_gen_mov_tl(dst, cpu_cc_dst);
339
}
340

    
341
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
342
{
343
    tcg_gen_mov_tl(cpu_cc_src, src1);
344
    tcg_gen_mov_tl(cpu_cc_src2, src2);
345
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
346
    tcg_gen_mov_tl(dst, cpu_cc_dst);
347
}
348

    
349
static TCGv_i32 gen_add32_carry32(void)
350
{
351
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
352

    
353
    /* Carry is computed from a previous add: (dst < src)  */
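    /* e.g. (32-bit) 0xffffffff + 1 wraps to 0, and 0 < 0xffffffff unsigned,
       so the computed carry is 1. */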
354
#if TARGET_LONG_BITS == 64
355
    cc_src1_32 = tcg_temp_new_i32();
356
    cc_src2_32 = tcg_temp_new_i32();
357
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
358
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
359
#else
360
    cc_src1_32 = cpu_cc_dst;
361
    cc_src2_32 = cpu_cc_src;
362
#endif
363

    
364
    carry_32 = tcg_temp_new_i32();
365
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
366

    
367
#if TARGET_LONG_BITS == 64
368
    tcg_temp_free_i32(cc_src1_32);
369
    tcg_temp_free_i32(cc_src2_32);
370
#endif
371

    
372
    return carry_32;
373
}
374

    
375
static TCGv_i32 gen_sub32_carry32(void)
376
{
377
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
378

    
379
    /* Carry is computed from a previous borrow: (src1 < src2)  */
380
#if TARGET_LONG_BITS == 64
381
    cc_src1_32 = tcg_temp_new_i32();
382
    cc_src2_32 = tcg_temp_new_i32();
383
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
384
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
385
#else
386
    cc_src1_32 = cpu_cc_src;
387
    cc_src2_32 = cpu_cc_src2;
388
#endif
389

    
390
    carry_32 = tcg_temp_new_i32();
391
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
392

    
393
#if TARGET_LONG_BITS == 64
394
    tcg_temp_free_i32(cc_src1_32);
395
    tcg_temp_free_i32(cc_src2_32);
396
#endif
397

    
398
    return carry_32;
399
}
400

    
401
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
402
                            TCGv src2, int update_cc)
403
{
404
    TCGv_i32 carry_32;
405
    TCGv carry;
406

    
407
    switch (dc->cc_op) {
408
    case CC_OP_DIV:
409
    case CC_OP_LOGIC:
410
        /* Carry is known to be zero.  Fall back to plain ADD.  */
411
        if (update_cc) {
412
            gen_op_add_cc(dst, src1, src2);
413
        } else {
414
            tcg_gen_add_tl(dst, src1, src2);
415
        }
416
        return;
417

    
418
    case CC_OP_ADD:
419
    case CC_OP_TADD:
420
    case CC_OP_TADDTV:
421
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
422
        {
423
            /* For 32-bit hosts, we can re-use the host's hardware carry
424
               generation by using an ADD2 opcode.  We discard the low
425
               part of the output.  Ideally we'd combine this operation
426
               with the add that generated the carry in the first place.  */
427
            TCGv dst_low = tcg_temp_new();
428
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
429
                            cpu_cc_src, src1, cpu_cc_src2, src2);
430
            tcg_temp_free(dst_low);
431
            goto add_done;
432
        }
433
#endif
434
        carry_32 = gen_add32_carry32();
435
        break;
436

    
437
    case CC_OP_SUB:
438
    case CC_OP_TSUB:
439
    case CC_OP_TSUBTV:
440
        carry_32 = gen_sub32_carry32();
441
        break;
442

    
443
    default:
444
        /* We need external help to produce the carry.  */
445
        carry_32 = tcg_temp_new_i32();
446
        gen_helper_compute_C_icc(carry_32, cpu_env);
447
        break;
448
    }
449

    
450
#if TARGET_LONG_BITS == 64
451
    carry = tcg_temp_new();
452
    tcg_gen_extu_i32_i64(carry, carry_32);
453
#else
454
    carry = carry_32;
455
#endif
456

    
457
    tcg_gen_add_tl(dst, src1, src2);
458
    tcg_gen_add_tl(dst, dst, carry);
459

    
460
    tcg_temp_free_i32(carry_32);
461
#if TARGET_LONG_BITS == 64
462
    tcg_temp_free(carry);
463
#endif
464

    
465
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
466
 add_done:
467
#endif
468
    if (update_cc) {
469
        tcg_gen_mov_tl(cpu_cc_src, src1);
470
        tcg_gen_mov_tl(cpu_cc_src2, src2);
471
        tcg_gen_mov_tl(cpu_cc_dst, dst);
472
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
473
        dc->cc_op = CC_OP_ADDX;
474
    }
475
}
476

    
477
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
478
{
479
    tcg_gen_mov_tl(cpu_cc_src, src1);
480
    tcg_gen_mov_tl(cpu_cc_src2, src2);
481
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
482
    tcg_gen_mov_tl(dst, cpu_cc_dst);
483
}
484

    
485
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
486
{
487
    tcg_gen_mov_tl(cpu_cc_src, src1);
488
    tcg_gen_mov_tl(cpu_cc_src2, src2);
489
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
490
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
491
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
492
    tcg_gen_mov_tl(dst, cpu_cc_dst);
493
}
494

    
495
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
496
{
497
    TCGv r_temp;
498
    TCGv_i32 r_const;
499
    int l1;
500

    
501
    l1 = gen_new_label();
502

    
503
    r_temp = tcg_temp_new();
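    /* Signed overflow on subtract: the operands have different sign bits
       and the result's sign differs from src1's (checked on bit 31 below). */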
504
    tcg_gen_xor_tl(r_temp, src1, src2);
505
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
506
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
507
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
508
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
509
    r_const = tcg_const_i32(TT_TOVF);
510
    gen_helper_raise_exception(cpu_env, r_const);
511
    tcg_temp_free_i32(r_const);
512
    gen_set_label(l1);
513
    tcg_temp_free(r_temp);
514
}
515

    
516
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
517
{
518
    tcg_gen_mov_tl(cpu_cc_src, src1);
519
    tcg_gen_movi_tl(cpu_cc_src2, src2);
520
    if (src2 == 0) {
521
        tcg_gen_mov_tl(cpu_cc_dst, src1);
522
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
523
        dc->cc_op = CC_OP_LOGIC;
524
    } else {
525
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
526
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
527
        dc->cc_op = CC_OP_SUB;
528
    }
529
    tcg_gen_mov_tl(dst, cpu_cc_dst);
530
}
531

    
532
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
533
{
534
    tcg_gen_mov_tl(cpu_cc_src, src1);
535
    tcg_gen_mov_tl(cpu_cc_src2, src2);
536
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
537
    tcg_gen_mov_tl(dst, cpu_cc_dst);
538
}
539

    
540
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
541
                            TCGv src2, int update_cc)
542
{
543
    TCGv_i32 carry_32;
544
    TCGv carry;
545

    
546
    switch (dc->cc_op) {
547
    case CC_OP_DIV:
548
    case CC_OP_LOGIC:
549
        /* Carry is known to be zero.  Fall back to plain SUB.  */
550
        if (update_cc) {
551
            gen_op_sub_cc(dst, src1, src2);
552
        } else {
553
            tcg_gen_sub_tl(dst, src1, src2);
554
        }
555
        return;
556

    
557
    case CC_OP_ADD:
558
    case CC_OP_TADD:
559
    case CC_OP_TADDTV:
560
        carry_32 = gen_add32_carry32();
561
        break;
562

    
563
    case CC_OP_SUB:
564
    case CC_OP_TSUB:
565
    case CC_OP_TSUBTV:
566
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
567
        {
568
            /* For 32-bit hosts, we can re-use the host's hardware carry
569
               generation by using a SUB2 opcode.  We discard the low
570
               part of the output.  Ideally we'd combine this operation
571
               with the subtraction that generated the carry in the first
               place.  */
572
            TCGv dst_low = tcg_temp_new();
573
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
574
                            cpu_cc_src, src1, cpu_cc_src2, src2);
575
            tcg_temp_free(dst_low);
576
            goto sub_done;
577
        }
578
#endif
579
        carry_32 = gen_sub32_carry32();
580
        break;
581

    
582
    default:
583
        /* We need external help to produce the carry.  */
584
        carry_32 = tcg_temp_new_i32();
585
        gen_helper_compute_C_icc(carry_32, cpu_env);
586
        break;
587
    }
588

    
589
#if TARGET_LONG_BITS == 64
590
    carry = tcg_temp_new();
591
    tcg_gen_extu_i32_i64(carry, carry_32);
592
#else
593
    carry = carry_32;
594
#endif
595

    
596
    tcg_gen_sub_tl(dst, src1, src2);
597
    tcg_gen_sub_tl(dst, dst, carry);
598

    
599
    tcg_temp_free_i32(carry_32);
600
#if TARGET_LONG_BITS == 64
601
    tcg_temp_free(carry);
602
#endif
603

    
604
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
605
 sub_done:
606
#endif
607
    if (update_cc) {
608
        tcg_gen_mov_tl(cpu_cc_src, src1);
609
        tcg_gen_mov_tl(cpu_cc_src2, src2);
610
        tcg_gen_mov_tl(cpu_cc_dst, dst);
611
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
612
        dc->cc_op = CC_OP_SUBX;
613
    }
614
}
615

    
616
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
617
{
618
    tcg_gen_mov_tl(cpu_cc_src, src1);
619
    tcg_gen_mov_tl(cpu_cc_src2, src2);
620
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
621
    tcg_gen_mov_tl(dst, cpu_cc_dst);
622
}
623

    
624
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
625
{
626
    tcg_gen_mov_tl(cpu_cc_src, src1);
627
    tcg_gen_mov_tl(cpu_cc_src2, src2);
628
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
629
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
630
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
631
    tcg_gen_mov_tl(dst, cpu_cc_dst);
632
}
633

    
634
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
635
{
636
    TCGv r_temp;
637
    int l1;
638

    
639
    l1 = gen_new_label();
640
    r_temp = tcg_temp_new();
641

    
642
    /* old op:
643
    if (!(env->y & 1))
644
        T1 = 0;
645
    */
646
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
647
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
648
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
649
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
650
    tcg_gen_movi_tl(cpu_cc_src2, 0);
651
    gen_set_label(l1);
652

    
653
    // b2 = T0 & 1;
654
    // env->y = (b2 << 31) | (env->y >> 1);
655
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
656
    tcg_gen_shli_tl(r_temp, r_temp, 31);
657
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
658
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
659
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
660
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
661

    
662
    // b1 = N ^ V;
663
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
664
    gen_mov_reg_V(r_temp, cpu_psr);
665
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
666
    tcg_temp_free(r_temp);
667

    
668
    // T0 = (b1 << 31) | (T0 >> 1);
669
    // src1 = T0;
670
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
671
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
672
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
673

    
674
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
675

    
676
    tcg_gen_mov_tl(dst, cpu_cc_dst);
677
}
678

    
679
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
680
{
681
    TCGv_i32 r_src1, r_src2;
682
    TCGv_i64 r_temp, r_temp2;
683

    
684
    r_src1 = tcg_temp_new_i32();
685
    r_src2 = tcg_temp_new_i32();
686

    
687
    tcg_gen_trunc_tl_i32(r_src1, src1);
688
    tcg_gen_trunc_tl_i32(r_src2, src2);
689

    
690
    r_temp = tcg_temp_new_i64();
691
    r_temp2 = tcg_temp_new_i64();
692

    
693
    if (sign_ext) {
694
        tcg_gen_ext_i32_i64(r_temp, r_src2);
695
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
696
    } else {
697
        tcg_gen_extu_i32_i64(r_temp, r_src2);
698
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
699
    }
700

    
701
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
702

    
703
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
704
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
705
    tcg_temp_free_i64(r_temp);
706
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
707

    
708
    tcg_gen_trunc_i64_tl(dst, r_temp2);
709

    
710
    tcg_temp_free_i64(r_temp2);
711

    
712
    tcg_temp_free_i32(r_src1);
713
    tcg_temp_free_i32(r_src2);
714
}
715

    
716
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
717
{
718
    /* zero-extend truncated operands before multiplication */
719
    gen_op_multiply(dst, src1, src2, 0);
720
}
721

    
722
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
723
{
724
    /* sign-extend truncated operands before multiplication */
725
    gen_op_multiply(dst, src1, src2, 1);
726
}
727

    
728
#ifdef TARGET_SPARC64
729
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
730
{
731
    TCGv_i32 r_const;
732
    int l1;
733

    
734
    l1 = gen_new_label();
735
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
736
    r_const = tcg_const_i32(TT_DIV_ZERO);
737
    gen_helper_raise_exception(cpu_env, r_const);
738
    tcg_temp_free_i32(r_const);
739
    gen_set_label(l1);
740
}
741

    
742
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
743
{
744
    int l1, l2;
745
    TCGv r_temp1, r_temp2;
746

    
747
    l1 = gen_new_label();
748
    l2 = gen_new_label();
749
    r_temp1 = tcg_temp_local_new();
750
    r_temp2 = tcg_temp_local_new();
751
    tcg_gen_mov_tl(r_temp1, src1);
752
    tcg_gen_mov_tl(r_temp2, src2);
753
    gen_trap_ifdivzero_tl(r_temp2);
754
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
755
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
756
    tcg_gen_movi_i64(dst, INT64_MIN);
757
    tcg_gen_br(l2);
758
    gen_set_label(l1);
759
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
760
    gen_set_label(l2);
761
    tcg_temp_free(r_temp1);
762
    tcg_temp_free(r_temp2);
763
}
764
#endif
765

    
766
// 1
767
static inline void gen_op_eval_ba(TCGv dst)
768
{
769
    tcg_gen_movi_tl(dst, 1);
770
}
771

    
772
// Z
773
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
774
{
775
    gen_mov_reg_Z(dst, src);
776
}
777

    
778
// Z | (N ^ V)
779
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
780
{
781
    gen_mov_reg_N(cpu_tmp0, src);
782
    gen_mov_reg_V(dst, src);
783
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
784
    gen_mov_reg_Z(cpu_tmp0, src);
785
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
786
}
787

    
788
// N ^ V
789
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
790
{
791
    gen_mov_reg_V(cpu_tmp0, src);
792
    gen_mov_reg_N(dst, src);
793
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
794
}
795

    
796
// C | Z
797
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
798
{
799
    gen_mov_reg_Z(cpu_tmp0, src);
800
    gen_mov_reg_C(dst, src);
801
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
802
}
803

    
804
// C
805
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
806
{
807
    gen_mov_reg_C(dst, src);
808
}
809

    
810
// V
811
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
812
{
813
    gen_mov_reg_V(dst, src);
814
}
815

    
816
// 0
817
static inline void gen_op_eval_bn(TCGv dst)
818
{
819
    tcg_gen_movi_tl(dst, 0);
820
}
821

    
822
// N
823
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
824
{
825
    gen_mov_reg_N(dst, src);
826
}
827

    
828
// !Z
829
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
830
{
831
    gen_mov_reg_Z(dst, src);
832
    tcg_gen_xori_tl(dst, dst, 0x1);
833
}
834

    
835
// !(Z | (N ^ V))
836
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
837
{
838
    gen_mov_reg_N(cpu_tmp0, src);
839
    gen_mov_reg_V(dst, src);
840
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
841
    gen_mov_reg_Z(cpu_tmp0, src);
842
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
843
    tcg_gen_xori_tl(dst, dst, 0x1);
844
}
845

    
846
// !(N ^ V)
847
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
848
{
849
    gen_mov_reg_V(cpu_tmp0, src);
850
    gen_mov_reg_N(dst, src);
851
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
852
    tcg_gen_xori_tl(dst, dst, 0x1);
853
}
854

    
855
// !(C | Z)
856
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
857
{
858
    gen_mov_reg_Z(cpu_tmp0, src);
859
    gen_mov_reg_C(dst, src);
860
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
861
    tcg_gen_xori_tl(dst, dst, 0x1);
862
}
863

    
864
// !C
865
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
866
{
867
    gen_mov_reg_C(dst, src);
868
    tcg_gen_xori_tl(dst, dst, 0x1);
869
}
870

    
871
// !N
872
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
873
{
874
    gen_mov_reg_N(dst, src);
875
    tcg_gen_xori_tl(dst, dst, 0x1);
876
}
877

    
878
// !V
879
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
880
{
881
    gen_mov_reg_V(dst, src);
882
    tcg_gen_xori_tl(dst, dst, 0x1);
883
}
884

    
885
/*
886
  FPSR bit field FCC1 | FCC0:
887
   0 =
888
   1 <
889
   2 >
890
   3 unordered
891
*/
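/* e.g. fcmps sets the field to 1 when rs1 < rs2, so the FBL ("branch on
   less") test below reduces to FCC0 & !FCC1. */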
892
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
893
                                    unsigned int fcc_offset)
894
{
895
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
896
    tcg_gen_andi_tl(reg, reg, 0x1);
897
}
898

    
899
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
900
                                    unsigned int fcc_offset)
901
{
902
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
903
    tcg_gen_andi_tl(reg, reg, 0x1);
904
}
905

    
906
// !0: FCC0 | FCC1
907
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
908
                                    unsigned int fcc_offset)
909
{
910
    gen_mov_reg_FCC0(dst, src, fcc_offset);
911
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
912
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
913
}
914

    
915
// 1 or 2: FCC0 ^ FCC1
916
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
917
                                    unsigned int fcc_offset)
918
{
919
    gen_mov_reg_FCC0(dst, src, fcc_offset);
920
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
921
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
922
}
923

    
924
// 1 or 3: FCC0
925
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
926
                                    unsigned int fcc_offset)
927
{
928
    gen_mov_reg_FCC0(dst, src, fcc_offset);
929
}
930

    
931
// 1: FCC0 & !FCC1
932
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
933
                                    unsigned int fcc_offset)
934
{
935
    gen_mov_reg_FCC0(dst, src, fcc_offset);
936
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
937
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
938
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
939
}
940

    
941
// 2 or 3: FCC1
942
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
943
                                    unsigned int fcc_offset)
944
{
945
    gen_mov_reg_FCC1(dst, src, fcc_offset);
946
}
947

    
948
// 2: !FCC0 & FCC1
949
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
950
                                    unsigned int fcc_offset)
951
{
952
    gen_mov_reg_FCC0(dst, src, fcc_offset);
953
    tcg_gen_xori_tl(dst, dst, 0x1);
954
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
955
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
956
}
957

    
958
// 3: FCC0 & FCC1
959
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
960
                                    unsigned int fcc_offset)
961
{
962
    gen_mov_reg_FCC0(dst, src, fcc_offset);
963
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
964
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
965
}
966

    
967
// 0: !(FCC0 | FCC1)
968
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
969
                                    unsigned int fcc_offset)
970
{
971
    gen_mov_reg_FCC0(dst, src, fcc_offset);
972
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
973
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
974
    tcg_gen_xori_tl(dst, dst, 0x1);
975
}
976

    
977
// 0 or 3: !(FCC0 ^ FCC1)
978
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
979
                                    unsigned int fcc_offset)
980
{
981
    gen_mov_reg_FCC0(dst, src, fcc_offset);
982
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
983
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
984
    tcg_gen_xori_tl(dst, dst, 0x1);
985
}
986

    
987
// 0 or 2: !FCC0
988
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
989
                                    unsigned int fcc_offset)
990
{
991
    gen_mov_reg_FCC0(dst, src, fcc_offset);
992
    tcg_gen_xori_tl(dst, dst, 0x1);
993
}
994

    
995
// !1: !(FCC0 & !FCC1)
996
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
997
                                    unsigned int fcc_offset)
998
{
999
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1000
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1001
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1002
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1003
    tcg_gen_xori_tl(dst, dst, 0x1);
1004
}
1005

    
1006
// 0 or 1: !FCC1
1007
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1008
                                    unsigned int fcc_offset)
1009
{
1010
    gen_mov_reg_FCC1(dst, src, fcc_offset);
1011
    tcg_gen_xori_tl(dst, dst, 0x1);
1012
}
1013

    
1014
// !2: !(!FCC0 & FCC1)
1015
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1016
                                    unsigned int fcc_offset)
1017
{
1018
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1019
    tcg_gen_xori_tl(dst, dst, 0x1);
1020
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1021
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1022
    tcg_gen_xori_tl(dst, dst, 0x1);
1023
}
1024

    
1025
// !3: !(FCC0 & FCC1)
1026
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1027
                                    unsigned int fcc_offset)
1028
{
1029
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1030
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1031
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1032
    tcg_gen_xori_tl(dst, dst, 0x1);
1033
}
1034

    
1035
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1036
                               target_ulong pc2, TCGv r_cond)
1037
{
1038
    int l1;
1039

    
1040
    l1 = gen_new_label();
1041

    
1042
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1043

    
1044
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
1045

    
1046
    gen_set_label(l1);
1047
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
1048
}
1049

    
1050
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1051
                                target_ulong pc2, TCGv r_cond)
1052
{
1053
    int l1;
1054

    
1055
    l1 = gen_new_label();
1056

    
1057
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1058

    
1059
    gen_goto_tb(dc, 0, pc2, pc1);
1060

    
1061
    gen_set_label(l1);
1062
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1063
}
1064

    
1065
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1066
                                      TCGv r_cond)
1067
{
1068
    int l1, l2;
1069

    
1070
    l1 = gen_new_label();
1071
    l2 = gen_new_label();
1072

    
1073
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1074

    
1075
    tcg_gen_movi_tl(cpu_npc, npc1);
1076
    tcg_gen_br(l2);
1077

    
1078
    gen_set_label(l1);
1079
    tcg_gen_movi_tl(cpu_npc, npc2);
1080
    gen_set_label(l2);
1081
}
1082

    
1083
/* call this function before using the condition register as it may
1084
   have been set for a jump */
1085
static inline void flush_cond(DisasContext *dc, TCGv cond)
1086
{
1087
    if (dc->npc == JUMP_PC) {
1088
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1089
        dc->npc = DYNAMIC_PC;
1090
    }
1091
}
1092

    
1093
static inline void save_npc(DisasContext *dc, TCGv cond)
1094
{
1095
    if (dc->npc == JUMP_PC) {
1096
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1097
        dc->npc = DYNAMIC_PC;
1098
    } else if (dc->npc != DYNAMIC_PC) {
1099
        tcg_gen_movi_tl(cpu_npc, dc->npc);
1100
    }
1101
}
1102

    
1103
static inline void save_state(DisasContext *dc, TCGv cond)
1104
{
1105
    tcg_gen_movi_tl(cpu_pc, dc->pc);
1106
    /* flush pending conditional evaluations before exposing cpu state */
1107
    if (dc->cc_op != CC_OP_FLAGS) {
1108
        dc->cc_op = CC_OP_FLAGS;
1109
        gen_helper_compute_psr(cpu_env);
1110
    }
1111
    save_npc(dc, cond);
1112
}
1113

    
1114
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1115
{
1116
    if (dc->npc == JUMP_PC) {
1117
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1118
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1119
        dc->pc = DYNAMIC_PC;
1120
    } else if (dc->npc == DYNAMIC_PC) {
1121
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1122
        dc->pc = DYNAMIC_PC;
1123
    } else {
1124
        dc->pc = dc->npc;
1125
    }
1126
}
1127

    
1128
static inline void gen_op_next_insn(void)
1129
{
1130
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
1131
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1132
}
1133

    
1134
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1135
                            DisasContext *dc)
1136
{
1137
    TCGv_i32 r_src;
1138

    
1139
#ifdef TARGET_SPARC64
1140
    if (cc)
1141
        r_src = cpu_xcc;
1142
    else
1143
        r_src = cpu_psr;
1144
#else
1145
    r_src = cpu_psr;
1146
#endif
1147
    switch (dc->cc_op) {
1148
    case CC_OP_FLAGS:
1149
        break;
1150
    default:
1151
        gen_helper_compute_psr(cpu_env);
1152
        dc->cc_op = CC_OP_FLAGS;
1153
        break;
1154
    }
1155
    switch (cond) {
1156
    case 0x0:
1157
        gen_op_eval_bn(r_dst);
1158
        break;
1159
    case 0x1:
1160
        gen_op_eval_be(r_dst, r_src);
1161
        break;
1162
    case 0x2:
1163
        gen_op_eval_ble(r_dst, r_src);
1164
        break;
1165
    case 0x3:
1166
        gen_op_eval_bl(r_dst, r_src);
1167
        break;
1168
    case 0x4:
1169
        gen_op_eval_bleu(r_dst, r_src);
1170
        break;
1171
    case 0x5:
1172
        gen_op_eval_bcs(r_dst, r_src);
1173
        break;
1174
    case 0x6:
1175
        gen_op_eval_bneg(r_dst, r_src);
1176
        break;
1177
    case 0x7:
1178
        gen_op_eval_bvs(r_dst, r_src);
1179
        break;
1180
    case 0x8:
1181
        gen_op_eval_ba(r_dst);
1182
        break;
1183
    case 0x9:
1184
        gen_op_eval_bne(r_dst, r_src);
1185
        break;
1186
    case 0xa:
1187
        gen_op_eval_bg(r_dst, r_src);
1188
        break;
1189
    case 0xb:
1190
        gen_op_eval_bge(r_dst, r_src);
1191
        break;
1192
    case 0xc:
1193
        gen_op_eval_bgu(r_dst, r_src);
1194
        break;
1195
    case 0xd:
1196
        gen_op_eval_bcc(r_dst, r_src);
1197
        break;
1198
    case 0xe:
1199
        gen_op_eval_bpos(r_dst, r_src);
1200
        break;
1201
    case 0xf:
1202
        gen_op_eval_bvc(r_dst, r_src);
1203
        break;
1204
    }
1205
}
1206

    
1207
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1208
{
1209
    unsigned int offset;
1210

    
1211
    switch (cc) {
1212
    default:
1213
    case 0x0:
1214
        offset = 0;
1215
        break;
1216
    case 0x1:
1217
        offset = 32 - 10;
1218
        break;
1219
    case 0x2:
1220
        offset = 34 - 10;
1221
        break;
1222
    case 0x3:
1223
        offset = 36 - 10;
1224
        break;
1225
    }
1226

    
1227
    switch (cond) {
1228
    case 0x0:
1229
        gen_op_eval_bn(r_dst);
1230
        break;
1231
    case 0x1:
1232
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1233
        break;
1234
    case 0x2:
1235
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1236
        break;
1237
    case 0x3:
1238
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1239
        break;
1240
    case 0x4:
1241
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1242
        break;
1243
    case 0x5:
1244
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1245
        break;
1246
    case 0x6:
1247
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1248
        break;
1249
    case 0x7:
1250
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1251
        break;
1252
    case 0x8:
1253
        gen_op_eval_ba(r_dst);
1254
        break;
1255
    case 0x9:
1256
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1257
        break;
1258
    case 0xa:
1259
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1260
        break;
1261
    case 0xb:
1262
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1263
        break;
1264
    case 0xc:
1265
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1266
        break;
1267
    case 0xd:
1268
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1269
        break;
1270
    case 0xe:
1271
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1272
        break;
1273
    case 0xf:
1274
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1275
        break;
1276
    }
1277
}
1278

    
1279
#ifdef TARGET_SPARC64
1280
// Inverted logic: each entry is the negation of the register-vs-zero
// condition, so gen_cond_reg can branch past the "set to 1" when the
// original condition does not hold.
1281
static const int gen_tcg_cond_reg[8] = {
1282
    -1,
1283
    TCG_COND_NE,
1284
    TCG_COND_GT,
1285
    TCG_COND_GE,
1286
    -1,
1287
    TCG_COND_EQ,
1288
    TCG_COND_LE,
1289
    TCG_COND_LT,
1290
};
1291

    
1292
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1293
{
1294
    int l1;
1295

    
1296
    l1 = gen_new_label();
1297
    tcg_gen_movi_tl(r_dst, 0);
1298
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1299
    tcg_gen_movi_tl(r_dst, 1);
1300
    gen_set_label(l1);
1301
}
1302
#endif
1303

    
1304
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1305
                      TCGv r_cond)
1306
{
1307
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1308
    target_ulong target = dc->pc + offset;
1309

    
1310
    if (cond == 0x0) {
1311
        /* unconditional not taken */
1312
        if (a) {
1313
            dc->pc = dc->npc + 4;
1314
            dc->npc = dc->pc + 4;
1315
        } else {
1316
            dc->pc = dc->npc;
1317
            dc->npc = dc->pc + 4;
1318
        }
1319
    } else if (cond == 0x8) {
1320
        /* unconditional taken */
1321
        if (a) {
1322
            dc->pc = target;
1323
            dc->npc = dc->pc + 4;
1324
        } else {
1325
            dc->pc = dc->npc;
1326
            dc->npc = target;
1327
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1328
        }
1329
    } else {
1330
        flush_cond(dc, r_cond);
1331
        gen_cond(r_cond, cc, cond, dc);
1332
        if (a) {
1333
            gen_branch_a(dc, target, dc->npc, r_cond);
1334
            dc->is_br = 1;
1335
        } else {
1336
            dc->pc = dc->npc;
1337
            dc->jump_pc[0] = target;
1338
            if (unlikely(dc->npc == DYNAMIC_PC)) {
1339
                dc->jump_pc[1] = DYNAMIC_PC;
1340
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1341
            } else {
1342
                dc->jump_pc[1] = dc->npc + 4;
1343
                dc->npc = JUMP_PC;
1344
            }
1345
        }
1346
    }
1347
}
1348

    
1349
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1350
                      TCGv r_cond)
1351
{
1352
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1353
    target_ulong target = dc->pc + offset;
1354

    
1355
    if (cond == 0x0) {
1356
        /* unconditional not taken */
1357
        if (a) {
1358
            dc->pc = dc->npc + 4;
1359
            dc->npc = dc->pc + 4;
1360
        } else {
1361
            dc->pc = dc->npc;
1362
            dc->npc = dc->pc + 4;
1363
        }
1364
    } else if (cond == 0x8) {
1365
        /* unconditional taken */
1366
        if (a) {
1367
            dc->pc = target;
1368
            dc->npc = dc->pc + 4;
1369
        } else {
1370
            dc->pc = dc->npc;
1371
            dc->npc = target;
1372
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1373
        }
1374
    } else {
1375
        flush_cond(dc, r_cond);
1376
        gen_fcond(r_cond, cc, cond);
1377
        if (a) {
1378
            gen_branch_a(dc, target, dc->npc, r_cond);
1379
            dc->is_br = 1;
1380
        } else {
1381
            dc->pc = dc->npc;
1382
            dc->jump_pc[0] = target;
1383
            if (unlikely(dc->npc == DYNAMIC_PC)) {
1384
                dc->jump_pc[1] = DYNAMIC_PC;
1385
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1386
            } else {
1387
                dc->jump_pc[1] = dc->npc + 4;
1388
                dc->npc = JUMP_PC;
1389
            }
1390
        }
1391
    }
1392
}
1393

    
1394
#ifdef TARGET_SPARC64
1395
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1396
                          TCGv r_cond, TCGv r_reg)
1397
{
1398
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1399
    target_ulong target = dc->pc + offset;
1400

    
1401
    flush_cond(dc, r_cond);
1402
    gen_cond_reg(r_cond, cond, r_reg);
1403
    if (a) {
1404
        gen_branch_a(dc, target, dc->npc, r_cond);
1405
        dc->is_br = 1;
1406
    } else {
1407
        dc->pc = dc->npc;
1408
        dc->jump_pc[0] = target;
1409
        if (unlikely(dc->npc == DYNAMIC_PC)) {
1410
            dc->jump_pc[1] = DYNAMIC_PC;
1411
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1412
        } else {
1413
            dc->jump_pc[1] = dc->npc + 4;
1414
            dc->npc = JUMP_PC;
1415
        }
1416
    }
1417
}
1418

    
1419
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1420
{
1421
    switch (fccno) {
1422
    case 0:
1423
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1424
        break;
1425
    case 1:
1426
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1427
        break;
1428
    case 2:
1429
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1430
        break;
1431
    case 3:
1432
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1433
        break;
1434
    }
1435
}
1436

    
1437
static inline void gen_op_fcmpd(int fccno)
1438
{
1439
    switch (fccno) {
1440
    case 0:
1441
        gen_helper_fcmpd(cpu_env);
1442
        break;
1443
    case 1:
1444
        gen_helper_fcmpd_fcc1(cpu_env);
1445
        break;
1446
    case 2:
1447
        gen_helper_fcmpd_fcc2(cpu_env);
1448
        break;
1449
    case 3:
1450
        gen_helper_fcmpd_fcc3(cpu_env);
1451
        break;
1452
    }
1453
}
1454

    
1455
static inline void gen_op_fcmpq(int fccno)
1456
{
1457
    switch (fccno) {
1458
    case 0:
1459
        gen_helper_fcmpq(cpu_env);
1460
        break;
1461
    case 1:
1462
        gen_helper_fcmpq_fcc1(cpu_env);
1463
        break;
1464
    case 2:
1465
        gen_helper_fcmpq_fcc2(cpu_env);
1466
        break;
1467
    case 3:
1468
        gen_helper_fcmpq_fcc3(cpu_env);
1469
        break;
1470
    }
1471
}
1472

    
1473
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1474
{
1475
    switch (fccno) {
1476
    case 0:
1477
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1478
        break;
1479
    case 1:
1480
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1481
        break;
1482
    case 2:
1483
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1484
        break;
1485
    case 3:
1486
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1487
        break;
1488
    }
1489
}
1490

    
1491
static inline void gen_op_fcmped(int fccno)
1492
{
1493
    switch (fccno) {
1494
    case 0:
1495
        gen_helper_fcmped(cpu_env);
1496
        break;
1497
    case 1:
1498
        gen_helper_fcmped_fcc1(cpu_env);
1499
        break;
1500
    case 2:
1501
        gen_helper_fcmped_fcc2(cpu_env);
1502
        break;
1503
    case 3:
1504
        gen_helper_fcmped_fcc3(cpu_env);
1505
        break;
1506
    }
1507
}
1508

    
1509
static inline void gen_op_fcmpeq(int fccno)
1510
{
1511
    switch (fccno) {
1512
    case 0:
1513
        gen_helper_fcmpeq(cpu_env);
1514
        break;
1515
    case 1:
1516
        gen_helper_fcmpeq_fcc1(cpu_env);
1517
        break;
1518
    case 2:
1519
        gen_helper_fcmpeq_fcc2(cpu_env);
1520
        break;
1521
    case 3:
1522
        gen_helper_fcmpeq_fcc3(cpu_env);
1523
        break;
1524
    }
1525
}
1526

    
1527
#else
1528

    
1529
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1530
{
1531
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1532
}
1533

    
1534
static inline void gen_op_fcmpd(int fccno)
1535
{
1536
    gen_helper_fcmpd(cpu_env);
1537
}
1538

    
1539
static inline void gen_op_fcmpq(int fccno)
1540
{
1541
    gen_helper_fcmpq(cpu_env);
1542
}
1543

    
1544
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1545
{
1546
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1547
}
1548

    
1549
static inline void gen_op_fcmped(int fccno)
1550
{
1551
    gen_helper_fcmped(cpu_env);
1552
}
1553

    
1554
static inline void gen_op_fcmpeq(int fccno)
1555
{
1556
    gen_helper_fcmpeq(cpu_env);
1557
}
1558
#endif
1559

    
1560
static inline void gen_op_fpexception_im(int fsr_flags)
1561
{
1562
    TCGv_i32 r_const;
1563

    
1564
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1565
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1566
    r_const = tcg_const_i32(TT_FP_EXCP);
1567
    gen_helper_raise_exception(cpu_env, r_const);
1568
    tcg_temp_free_i32(r_const);
1569
}
1570

    
1571
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1572
{
1573
#if !defined(CONFIG_USER_ONLY)
1574
    if (!dc->fpu_enabled) {
1575
        TCGv_i32 r_const;
1576

    
1577
        save_state(dc, r_cond);
1578
        r_const = tcg_const_i32(TT_NFPU_INSN);
1579
        gen_helper_raise_exception(cpu_env, r_const);
1580
        tcg_temp_free_i32(r_const);
1581
        dc->is_br = 1;
1582
        return 1;
1583
    }
1584
#endif
1585
    return 0;
1586
}
1587

    
1588
static inline void gen_update_fprs_dirty(int rd)
1589
{
1590
#if defined(TARGET_SPARC64)
1591
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
1592
#endif
1593
}
1594

    
1595
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1596
{
1597
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1598
}
1599

    
1600
static inline void gen_clear_float_exceptions(void)
1601
{
1602
    gen_helper_clear_float_exceptions(cpu_env);
1603
}
1604

    
1605
/* asi moves */
1606
#ifdef TARGET_SPARC64
1607
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1608
{
1609
    int asi;
1610
    TCGv_i32 r_asi;
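
    /* V9 alternate-space access: with i=1 the ASI comes from the %asi
       register, with i=0 from the instruction's 8-bit imm_asi field. */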
1611

    
1612
    if (IS_IMM) {
1613
        r_asi = tcg_temp_new_i32();
1614
        tcg_gen_mov_i32(r_asi, cpu_asi);
1615
    } else {
1616
        asi = GET_FIELD(insn, 19, 26);
1617
        r_asi = tcg_const_i32(asi);
1618
    }
1619
    return r_asi;
1620
}
1621

    
1622
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1623
                              int sign)
1624
{
1625
    TCGv_i32 r_asi, r_size, r_sign;
1626

    
1627
    r_asi = gen_get_asi(insn, addr);
1628
    r_size = tcg_const_i32(size);
1629
    r_sign = tcg_const_i32(sign);
1630
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1631
    tcg_temp_free_i32(r_sign);
1632
    tcg_temp_free_i32(r_size);
1633
    tcg_temp_free_i32(r_asi);
1634
}
1635

    
1636
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1637
{
1638
    TCGv_i32 r_asi, r_size;
1639

    
1640
    r_asi = gen_get_asi(insn, addr);
1641
    r_size = tcg_const_i32(size);
1642
    gen_helper_st_asi(addr, src, r_asi, r_size);
1643
    tcg_temp_free_i32(r_size);
1644
    tcg_temp_free_i32(r_asi);
1645
}
1646

    
1647
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1648
{
1649
    TCGv_i32 r_asi, r_size, r_rd;
1650

    
1651
    r_asi = gen_get_asi(insn, addr);
1652
    r_size = tcg_const_i32(size);
1653
    r_rd = tcg_const_i32(rd);
1654
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1655
    tcg_temp_free_i32(r_rd);
1656
    tcg_temp_free_i32(r_size);
1657
    tcg_temp_free_i32(r_asi);
1658
}
1659

    
1660
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1661
{
1662
    TCGv_i32 r_asi, r_size, r_rd;
1663

    
1664
    r_asi = gen_get_asi(insn, addr);
1665
    r_size = tcg_const_i32(size);
1666
    r_rd = tcg_const_i32(rd);
1667
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1668
    tcg_temp_free_i32(r_rd);
1669
    tcg_temp_free_i32(r_size);
1670
    tcg_temp_free_i32(r_asi);
1671
}
1672

    
1673
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1674
{
1675
    TCGv_i32 r_asi, r_size, r_sign;
1676

    
1677
    r_asi = gen_get_asi(insn, addr);
1678
    r_size = tcg_const_i32(4);
1679
    r_sign = tcg_const_i32(0);
1680
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1681
    tcg_temp_free_i32(r_sign);
1682
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1683
    tcg_temp_free_i32(r_size);
1684
    tcg_temp_free_i32(r_asi);
1685
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1686
}
1687

    
1688
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1689
{
1690
    TCGv_i32 r_asi, r_rd;
1691

    
1692
    r_asi = gen_get_asi(insn, addr);
1693
    r_rd = tcg_const_i32(rd);
1694
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1695
    tcg_temp_free_i32(r_rd);
1696
    tcg_temp_free_i32(r_asi);
1697
}
1698

    
1699
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1700
{
1701
    TCGv_i32 r_asi, r_size;
1702

    
1703
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1704
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1705
    r_asi = gen_get_asi(insn, addr);
1706
    r_size = tcg_const_i32(8);
1707
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1708
    tcg_temp_free_i32(r_size);
1709
    tcg_temp_free_i32(r_asi);
1710
}
1711

    
1712
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1713
                               int rd)
1714
{
1715
    TCGv r_val1;
1716
    TCGv_i32 r_asi;
1717

    
1718
    r_val1 = tcg_temp_new();
1719
    gen_movl_reg_TN(rd, r_val1);
1720
    r_asi = gen_get_asi(insn, addr);
1721
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1722
    tcg_temp_free_i32(r_asi);
1723
    tcg_temp_free(r_val1);
1724
}
1725

    
1726
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1727
                                int rd)
1728
{
1729
    TCGv_i32 r_asi;
1730

    
1731
    gen_movl_reg_TN(rd, cpu_tmp64);
1732
    r_asi = gen_get_asi(insn, addr);
1733
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1734
    tcg_temp_free_i32(r_asi);
1735
}
1736

    
1737
#elif !defined(CONFIG_USER_ONLY)
1738

    
1739
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1740
                              int sign)
1741
{
1742
    TCGv_i32 r_asi, r_size, r_sign;
1743

    
1744
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1745
    r_size = tcg_const_i32(size);
1746
    r_sign = tcg_const_i32(sign);
1747
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1748
    tcg_temp_free(r_sign);
1749
    tcg_temp_free(r_size);
1750
    tcg_temp_free(r_asi);
1751
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1752
}
1753

    
1754
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1755
{
1756
    TCGv_i32 r_asi, r_size;
1757

    
1758
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1759
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1760
    r_size = tcg_const_i32(size);
1761
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1762
    tcg_temp_free(r_size);
1763
    tcg_temp_free(r_asi);
1764
}
1765

    
1766
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1767
{
1768
    TCGv_i32 r_asi, r_size, r_sign;
1769
    TCGv_i64 r_val;
1770

    
1771
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1772
    r_size = tcg_const_i32(4);
1773
    r_sign = tcg_const_i32(0);
1774
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1775
    tcg_temp_free(r_sign);
1776
    r_val = tcg_temp_new_i64();
1777
    tcg_gen_extu_tl_i64(r_val, dst);
1778
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1779
    tcg_temp_free_i64(r_val);
1780
    tcg_temp_free(r_size);
1781
    tcg_temp_free(r_asi);
1782
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1783
}
1784

    
1785
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
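/* LDSTUBA: load the addressed byte into dst, then store 0xff back to
   the same location through the same ASI. */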
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif

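/* Fetch the rs1/rs2 operands: %g0 reads as zero, the other globals are
   TCG globals and are returned directly, and windowed registers are
   loaded from the current window through cpu_regwptr into the
   caller-supplied temporary. */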
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}

#ifdef TARGET_SPARC64
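/* Set r_tsptr to the trap_state entry for the current trap level,
   i.e. &env->ts[env->tl & MAXTL_MASK]. */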
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;

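/* Decode one SPARC instruction at dc->pc and emit the corresponding TCG ops. */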
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_env, cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_env, cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr(cpu_env);
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr(cpu_env);
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst, cpu_env);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw(cpu_env);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
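                /* Most FPops below follow the same pattern: load the
                   sources, clear the accumulated float exception
                   flags, call the helper, re-check IEEE exceptions,
                   then store the result and mark the destination's
                   FPRS dirty bits. */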
                switch (xop) {
                case 0x1: /* fmovs */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x5: /* fnegs */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fnegs(cpu_dst_32, cpu_src1_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x9: /* fabss */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fabss(cpu_dst_32, cpu_src1_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fsqrts(cpu_dst_32, cpu_env, cpu_src1_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fadds(cpu_dst_32, cpu_env,
                                     cpu_src1_32, cpu_src2_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fsubs(cpu_dst_32, cpu_env,
                                     cpu_src1_32, cpu_src2_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fmuls(cpu_dst_32, cpu_env,
                                     cpu_src1_32, cpu_src2_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fdivs(cpu_dst_32, cpu_env,
                                     cpu_src1_32, cpu_src2_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    gen_helper_fsmuld(cpu_env, cpu_src1_32, cpu_src2_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fitos(cpu_dst_32, cpu_env, cpu_src1_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fdtos(cpu_dst_32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fqtos(cpu_dst_32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xc8: /* fitod */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_helper_fitod(cpu_env, cpu_src1_32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_helper_fstod(cpu_env, cpu_src1_32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_helper_fitoq(cpu_env, cpu_src1_32);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_helper_fstoq(cpu_env, cpu_src1_32);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fstoi(cpu_dst_32, cpu_env, cpu_src1_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fdtoi(cpu_dst_32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fqtoi(cpu_dst_32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
#ifdef TARGET_SPARC64
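                /* The FP register file is modeled as 32-bit halves,
                   so V9 double and quad moves are emitted as two or
                   four 32-bit TCG moves. */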
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd)],
                                    cpu__fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 1],
                                    cpu__fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 2],
                                    cpu__fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 3],
                                    cpu__fpr[QFPREG(rs2) + 3]);
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_helper_fstox(cpu_env, cpu_src1_32);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fxtos(cpu_dst_32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    gen_update_fprs_dirty(rd);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu__fpr[DFPREG(rd)], cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu__fpr[DFPREG(rd) + 1], cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd)], cpu__fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 1], cpu__fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 2], cpu__fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 3], cpu__fpr[QFPREG(rs2) + 3]);
                    gen_update_fprs_dirty(QFPREG(rd));
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
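/* FMOVcc on a floating-point condition code: evaluate %fccN, then
   branch over the register copy (and the FPRS dirty update) when the
   condition is false. */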
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
                        gen_store_fpr_F(dc, rd, cpu_src1_32);           \
                        gen_update_fprs_dirty(rd);                      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu__fpr[DFPREG(rd)],           \
                                        cpu__fpr[DFPREG(rs2)]);         \
                        tcg_gen_mov_i32(cpu__fpr[DFPREG(rd) + 1],       \
                                        cpu__fpr[DFPREG(rs2) + 1]);     \
                        gen_update_fprs_dirty(DFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd)],           \
                                        cpu__fpr[QFPREG(rs2)]);         \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 1],       \
                                        cpu__fpr[QFPREG(rs2) + 1]);     \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 2],       \
                                        cpu__fpr[QFPREG(rs2) + 2]);     \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 3],       \
                                        cpu__fpr[QFPREG(rs2) + 3]);     \
                        gen_update_fprs_dirty(QFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
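/* Same pattern for FMOVcc on the integer condition codes: the icc
   argument selects %icc or %xcc via gen_cond(). */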
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2);          \
                        gen_store_fpr_F(dc, rd, cpu_src1_32);           \
                        gen_update_fprs_dirty(rd);                      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu__fpr[DFPREG(rd)],           \
                                        cpu__fpr[DFPREG(rs2)]);         \
                        tcg_gen_mov_i32(cpu__fpr[DFPREG(rd) + 1],       \
                                        cpu__fpr[DFPREG(rs2) + 1]);     \
                        gen_update_fprs_dirty(DFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd)],           \
                                        cpu__fpr[QFPREG(rs2)]);         \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 1],       \
                                        cpu__fpr[QFPREG(rs2) + 1]);     \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 2],       \
                                        cpu__fpr[QFPREG(rs2) + 2]);     \
                        tcg_gen_mov_i32(cpu__fpr[QFPREG(rd) + 3],       \
                                        cpu__fpr[QFPREG(rs2) + 3]);     \
                        gen_update_fprs_dirty(QFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
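                /* On V9, bit 12 of the instruction selects the 64-bit form
                   (sllx, 6-bit shift count) over the 32-bit form (5-bit
                   count). */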
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        {
                            TCGv r_temp1, r_temp2;
                            r_temp1 = tcg_temp_local_new();
                            r_temp2 = tcg_temp_local_new();
                            tcg_gen_mov_tl(r_temp1, cpu_src1);
                            tcg_gen_mov_tl(r_temp2, cpu_src2);
                            gen_trap_ifdivzero_tl(r_temp2);
                            tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
                            tcg_temp_free(r_temp1);
                            tcg_temp_free(r_temp2);
                        }
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr(cpu_env);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
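                    /* wr %y and the other write-state-register forms below
                       architecturally write rs1 XOR operand2, hence the
                       tcg_gen_xor_tl() in each case. */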
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
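                        /* On V9 the rd field selects which privileged register
                           is written (tpc, tnpc, tstate, tt, tick, ...); on
                           32-bit SPARC this is wrwim. */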
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    gen_helper_wrpstate(cpu_env, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

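                            /* Bit 18 of the insn selects the integer condition
                               codes (cc field 0 -> %icc, 2 -> %xcc) rather than
                               the floating-point %fccN codes. */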
                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                              cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
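                /* The edge, bmask and alignaddrl forms are still unimplemented
                   and raise illegal_insn; the fcmp and fmul cases below load
                   their double-precision operands via the DT0/DT1 staging
                   slots. */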
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
4043
                case 0x03a: /* VIS I fpack32 */
4044
                case 0x03b: /* VIS I fpack16 */
4045
                case 0x03d: /* VIS I fpackfix */
4046
                case 0x03e: /* VIS I pdist */
4047
                    // XXX
4048
                    goto illegal_insn;
4049
                case 0x048: /* VIS I faligndata */
4050
                    CHECK_FPU_FEATURE(dc, VIS1);
4051
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4052
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4053
                    gen_helper_faligndata(cpu_env);
4054
                    gen_op_store_DT0_fpr(DFPREG(rd));
4055
                    gen_update_fprs_dirty(DFPREG(rd));
4056
                    break;
4057
                case 0x04b: /* VIS I fpmerge */
4058
                    CHECK_FPU_FEATURE(dc, VIS1);
4059
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4060
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4061
                    gen_helper_fpmerge(cpu_env);
4062
                    gen_op_store_DT0_fpr(DFPREG(rd));
4063
                    gen_update_fprs_dirty(DFPREG(rd));
4064
                    break;
4065
                case 0x04c: /* VIS II bshuffle */
4066
                    // XXX
4067
                    goto illegal_insn;
4068
                case 0x04d: /* VIS I fexpand */
4069
                    CHECK_FPU_FEATURE(dc, VIS1);
4070
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4071
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4072
                    gen_helper_fexpand(cpu_env);
4073
                    gen_op_store_DT0_fpr(DFPREG(rd));
4074
                    gen_update_fprs_dirty(DFPREG(rd));
4075
                    break;
4076
                case 0x050: /* VIS I fpadd16 */
4077
                    CHECK_FPU_FEATURE(dc, VIS1);
4078
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4079
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4080
                    gen_helper_fpadd16(cpu_env);
4081
                    gen_op_store_DT0_fpr(DFPREG(rd));
4082
                    gen_update_fprs_dirty(DFPREG(rd));
4083
                    break;
4084
                case 0x051: /* VIS I fpadd16s */
4085
                    CHECK_FPU_FEATURE(dc, VIS1);
4086
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4087
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
4088
                    cpu_dst_32 = gen_dest_fpr_F();
4089
                    gen_helper_fpadd16s(cpu_dst_32, cpu_env,
4090
                                        cpu_src1_32, cpu_src2_32);
4091
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4092
                    gen_update_fprs_dirty(rd);
4093
                    break;
4094
                case 0x052: /* VIS I fpadd32 */
4095
                    CHECK_FPU_FEATURE(dc, VIS1);
4096
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4097
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4098
                    gen_helper_fpadd32(cpu_env);
4099
                    gen_op_store_DT0_fpr(DFPREG(rd));
4100
                    gen_update_fprs_dirty(DFPREG(rd));
4101
                    break;
4102
                case 0x053: /* VIS I fpadd32s */
4103
                    CHECK_FPU_FEATURE(dc, VIS1);
4104
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4105
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
4106
                    cpu_dst_32 = gen_dest_fpr_F();
4107
                    tcg_gen_add_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
4108
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4109
                    gen_update_fprs_dirty(rd);
4110
                    break;
4111
                case 0x054: /* VIS I fpsub16 */
4112
                    CHECK_FPU_FEATURE(dc, VIS1);
4113
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4114
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4115
                    gen_helper_fpsub16(cpu_env);
4116
                    gen_op_store_DT0_fpr(DFPREG(rd));
4117
                    gen_update_fprs_dirty(DFPREG(rd));
4118
                    break;
4119
                case 0x055: /* VIS I fpsub16s */
4120
                    CHECK_FPU_FEATURE(dc, VIS1);
4121
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4122
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
4123
                    cpu_dst_32 = gen_dest_fpr_F();
4124
                    gen_helper_fpsub16s(cpu_dst_32, cpu_env,
4125
                                        cpu_src1_32, cpu_src2_32);
4126
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4127
                    gen_update_fprs_dirty(rd);
4128
                    break;
4129
                case 0x056: /* VIS I fpsub32 */
4130
                    CHECK_FPU_FEATURE(dc, VIS1);
4131
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4132
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4133
                    gen_helper_fpsub32(cpu_env);
4134
                    gen_op_store_DT0_fpr(DFPREG(rd));
4135
                    gen_update_fprs_dirty(DFPREG(rd));
4136
                    break;
4137
                case 0x057: /* VIS I fpsub32s */
4138
                    CHECK_FPU_FEATURE(dc, VIS1);
4139
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4140
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
4141
                    cpu_dst_32 = gen_dest_fpr_F();
4142
                    tcg_gen_sub_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
4143
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4144
                    gen_update_fprs_dirty(rd);
4145
                    break;
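                /* The VIS logical/move operations below are expanded inline:
                   the 64-bit forms act on both 32-bit halves of the double
                   register pair, the "s" forms on a single 32-bit register. */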
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu__fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu__fpr[DFPREG(rd) + 1], 0);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_nor_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu__fpr[DFPREG(rd)],
                                     cpu__fpr[DFPREG(rs1)],
                                     cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu__fpr[DFPREG(rd) + 1],
                                     cpu__fpr[DFPREG(rs1) + 1],
                                     cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_andc_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_not_i32(cpu_dst_32, cpu_src1_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu__fpr[DFPREG(rd)],
                                     cpu__fpr[DFPREG(rs2)],
                                     cpu__fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu__fpr[DFPREG(rd) + 1],
                                     cpu__fpr[DFPREG(rs2) + 1],
                                     cpu__fpr[DFPREG(rs1) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_andc_i32(cpu_dst_32, cpu_src2_32, cpu_src1_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_not_i32(cpu_dst_32, cpu_src1_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_xor_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu__fpr[DFPREG(rd)],
                                     cpu__fpr[DFPREG(rs1)],
                                     cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu__fpr[DFPREG(rd) + 1],
                                     cpu__fpr[DFPREG(rs1) + 1],
                                     cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_nand_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_and_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_eqv_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_eqv_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_eqv_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs1)],
                                    cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_orc_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu__fpr[DFPREG(rd)],
                                    cpu__fpr[DFPREG(rs2)],
                                    cpu__fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu__fpr[DFPREG(rd) + 1],
                                    cpu__fpr[DFPREG(rs2) + 1],
                                    cpu__fpr[DFPREG(rs1) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_orc_i32(cpu_dst_32, cpu_src2_32, cpu_src1_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu__fpr[DFPREG(rd)],
                                   cpu__fpr[DFPREG(rs1)],
                                   cpu__fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu__fpr[DFPREG(rd) + 1],
                                   cpu__fpr[DFPREG(rs1) + 1],
                                   cpu__fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_or_i32(cpu_dst_32, cpu_src1_32, cpu_src2_32);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu__fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu__fpr[DFPREG(rd) + 1], -1);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_movi_i32(cpu_dst_32, -1);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore(cpu_env);
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
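                /* The remaining xop values share a common operand computation:
                   cpu_dst = rs1 + simm13 (immediate form) or rs1 + rs2
                   (register form).  jmpl/rett use it as the target address,
                   save/restore write it to rd in the new register window. */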
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr(cpu_env);
            }
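            /* Compute the effective address: casa/casxa use rs1 directly as
               the address (rs2 holds the comparison value); everything else
               uses rs1 + simm13 or rs1 + rs2. */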
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
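                        /* Split the 64-bit value over the register pair:
                           the low word goes to r[rd + 1] here, the high word
                           is left in cpu_val and written to r[rd] below. */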
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomic */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomic */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomic */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomic */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    gen_update_fprs_dirty(rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
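                    /* rd selects the form: rd == 1 is the V9 ldxfsr (64-bit
                       FSR load), otherwise the 32-bit ldfsr. */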
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_env, cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                        gen_update_fprs_dirty(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                        gen_update_fprs_dirty(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
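                        /* Concatenate r[rd + 1] (low word) with r[rd]
                           (high word, already in cpu_val) and store them
                           with a single 64-bit access. */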
4879
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
4880
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4881
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4882
                    }
4883
                    break;
4884
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4885
                case 0x14: /* sta, V9 stwa, store word alternate */
4886
#ifndef TARGET_SPARC64
4887
                    if (IS_IMM)
4888
                        goto illegal_insn;
4889
                    if (!supervisor(dc))
4890
                        goto priv_insn;
4891
#endif
4892
                    save_state(dc, cpu_cond);
4893
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
4894
                    dc->npc = DYNAMIC_PC;
4895
                    break;
4896
                case 0x15: /* stba, store byte alternate */
4897
#ifndef TARGET_SPARC64
4898
                    if (IS_IMM)
4899
                        goto illegal_insn;
4900
                    if (!supervisor(dc))
4901
                        goto priv_insn;
4902
#endif
4903
                    save_state(dc, cpu_cond);
4904
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
4905
                    dc->npc = DYNAMIC_PC;
4906
                    break;
4907
                case 0x16: /* stha, store halfword alternate */
4908
#ifndef TARGET_SPARC64
4909
                    if (IS_IMM)
4910
                        goto illegal_insn;
4911
                    if (!supervisor(dc))
4912
                        goto priv_insn;
4913
#endif
4914
                    save_state(dc, cpu_cond);
4915
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
4916
                    dc->npc = DYNAMIC_PC;
4917
                    break;
4918
                case 0x17: /* stda, store double word alternate */
4919
#ifndef TARGET_SPARC64
4920
                    if (IS_IMM)
4921
                        goto illegal_insn;
4922
                    if (!supervisor(dc))
4923
                        goto priv_insn;
4924
#endif
4925
                    if (rd & 1)
4926
                        goto illegal_insn;
4927
                    else {
4928
                        save_state(dc, cpu_cond);
4929
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4930
                    }
4931
                    break;
4932
#endif
4933
#ifdef TARGET_SPARC64
4934
                case 0x0e: /* V9 stx */
4935
                    gen_address_mask(dc, cpu_addr);
4936
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4937
                    break;
4938
                case 0x1e: /* V9 stxa */
4939
                    save_state(dc, cpu_cond);
4940
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
4941
                    dc->npc = DYNAMIC_PC;
4942
                    break;
4943
#endif
4944
                default:
4945
                    goto illegal_insn;
4946
                }
4947
            } else if (xop > 0x23 && xop < 0x28) {
4948
                if (gen_trap_ifnofpu(dc, cpu_cond))
4949
                    goto jmp_insn;
4950
                save_state(dc, cpu_cond);
4951
                switch (xop) {
4952
                case 0x24: /* stf, store fpreg */
4953
                    gen_address_mask(dc, cpu_addr);
4954
                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
4955
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
4956
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4957
                    break;
4958
                case 0x25: /* stfsr, V9 stxfsr */
4959
#ifdef TARGET_SPARC64
4960
                    gen_address_mask(dc, cpu_addr);
4961
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4962
                    if (rd == 1)
4963
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4964
                    else
4965
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4966
#else
4967
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4968
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4969
#endif
4970
                    break;
4971
                case 0x26:
4972
#ifdef TARGET_SPARC64
4973
                    /* V9 stqf, store quad fpreg */
4974
                    {
4975
                        TCGv_i32 r_const;
4976

    
4977
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4978
                        gen_op_load_fpr_QT0(QFPREG(rd));
4979
                        r_const = tcg_const_i32(dc->mem_idx);
4980
                        gen_address_mask(dc, cpu_addr);
4981
                        gen_helper_stqf(cpu_addr, r_const);
4982
                        tcg_temp_free_i32(r_const);
4983
                    }
4984
                    break;
4985
#else /* !TARGET_SPARC64 */
4986
                    /* stdfq, store floating point queue */
4987
#if defined(CONFIG_USER_ONLY)
4988
                    goto illegal_insn;
4989
#else
4990
                    if (!supervisor(dc))
4991
                        goto priv_insn;
4992
                    if (gen_trap_ifnofpu(dc, cpu_cond))
4993
                        goto jmp_insn;
4994
                    goto nfq_insn;
4995
#endif
4996
#endif
4997
                case 0x27: /* stdf, store double fpreg */
4998
                    {
4999
                        TCGv_i32 r_const;
5000

    
5001
                        gen_op_load_fpr_DT0(DFPREG(rd));
5002
                        r_const = tcg_const_i32(dc->mem_idx);
5003
                        gen_address_mask(dc, cpu_addr);
5004
                        gen_helper_stdf(cpu_addr, r_const);
5005
                        tcg_temp_free_i32(r_const);
5006
                    }
5007
                    break;
5008
                default:
5009
                    goto illegal_insn;
5010
                }
5011
            } else if (xop > 0x33 && xop < 0x3f) {
5012
                save_state(dc, cpu_cond);
5013
                switch (xop) {
5014
#ifdef TARGET_SPARC64
5015
                case 0x34: /* V9 stfa */
5016
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
5017
                        goto jmp_insn;
5018
                    }
5019
                    gen_stf_asi(cpu_addr, insn, 4, rd);
5020
                    break;
5021
                case 0x36: /* V9 stqfa */
5022
                    {
5023
                        TCGv_i32 r_const;
5024

    
5025
                        CHECK_FPU_FEATURE(dc, FLOAT128);
5026
                        if (gen_trap_ifnofpu(dc, cpu_cond)) {
5027
                            goto jmp_insn;
5028
                        }
5029
                        r_const = tcg_const_i32(7);
5030
                        gen_helper_check_align(cpu_addr, r_const);
5031
                        tcg_temp_free_i32(r_const);
5032
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5033
                    }
5034
                    break;
5035
                case 0x37: /* V9 stdfa */
5036
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
5037
                        goto jmp_insn;
5038
                    }
5039
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5040
                    break;
5041
                case 0x3c: /* V9 casa */
5042
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5043
                    gen_movl_TN_reg(rd, cpu_val);
5044
                    break;
5045
                case 0x3e: /* V9 casxa */
5046
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5047
                    gen_movl_TN_reg(rd, cpu_val);
5048
                    break;
5049
#else
5050
                case 0x34: /* stc */
5051
                case 0x35: /* stcsr */
5052
                case 0x36: /* stdcq */
5053
                case 0x37: /* stdc */
5054
                    goto ncp_insn;
5055
#endif
5056
                default:
5057
                    goto illegal_insn;
5058
                }
5059
            } else
5060
                goto illegal_insn;
5061
        }
5062
        break;
5063
    }
5064
    /* default case for non jump instructions */
5065
    if (dc->npc == DYNAMIC_PC) {
5066
        dc->pc = DYNAMIC_PC;
5067
        gen_op_next_insn();
5068
    } else if (dc->npc == JUMP_PC) {
5069
        /* we can do a static jump */
5070
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5071
        dc->is_br = 1;
5072
    } else {
5073
        dc->pc = dc->npc;
5074
        dc->npc = dc->npc + 4;
5075
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
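
/* Translate a block of guest code starting at tb->pc.  disas_sparc_insn()
   is called repeatedly until the block must end: a branch was emitted,
   the next pc is not sequential, a page boundary or the op/insn limits
   were reached, or single-step mode is active.  When 'spc' is non-zero
   the loop also fills gen_opc_pc[]/gen_opc_npc[] so that the guest
   pc/npc of every generated op can later be recovered by
   restore_state_to_opc().  */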
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
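
/* Normal translation entry point: generate code for 'tb' without
   recording per-op pc information.  */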
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
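
/* "Search pc" entry point: retranslate 'tb' while recording, for each
   generated op, the guest pc/npc it corresponds to.  */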
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
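
/* One-time initialisation of the TCG globals used by the translator.
   Each global is bound to its backing field in CPUState (via TCG_AREG0
   plus an offset), so reads and writes of e.g. cpu_pc or cpu_gregs[i]
   operate directly on the CPU structure.  */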
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu__fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                 offsetof(CPUState, fpr[i]),
                                                 fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
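
/* Restore env->pc and env->npc from the values recorded for op index
   'pc_pos' during a "search pc" translation.  The recorded npc is
   either a real address or one of the DYNAMIC_PC/JUMP_PC markers; for
   JUMP_PC the branch condition in env->cond selects between the two
   recorded jump targets.  Lazily-evaluated condition codes are flushed
   before the CPU state is exposed.  */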
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr(env);
    }
}