Statistics
| Branch: | Revision:

root / target-sparc / translate.c @ dcfd14b3

History | View | Annotate | Download (191.2 kB)

1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "exec-all.h"
29
#include "disas.h"
30
#include "helper.h"
31
#include "tcg-op.h"
32

    
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
#define DEBUG_DISAS
37

    
38
#define DYNAMIC_PC  1 /* dynamic pc value */
39
#define JUMP_PC     2 /* dynamic pc value which takes only two values
40
                         according to jump_pc[T2] */
41

    
42
/* global register indexes */
43
static TCGv_ptr cpu_env, cpu_regwptr;
44
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45
static TCGv_i32 cpu_cc_op;
46
static TCGv_i32 cpu_psr;
47
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48
static TCGv cpu_y;
49
#ifndef CONFIG_USER_ONLY
50
static TCGv cpu_tbr;
51
#endif
52
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53
#ifdef TARGET_SPARC64
54
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55
static TCGv cpu_gsr;
56
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58
static TCGv_i32 cpu_softint;
59
#else
60
static TCGv cpu_wim;
61
#endif
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64
static TCGv_i32 cpu_tmp32;
65
static TCGv_i64 cpu_tmp64;
66
/* Floating point registers */
67
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68

    
69
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70
static target_ulong gen_opc_jump_pc[2];
71

    
72
#include "gen-icount.h"
73

    
74
typedef struct DisasContext {
75
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
76
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78
    int is_br;
79
    int mem_idx;
80
    int fpu_enabled;
81
    int address_mask_32bit;
82
    int singlestep;
83
    uint32_t cc_op;  /* current CC operation */
84
    struct TranslationBlock *tb;
85
    sparc_def_t *def;
86
} DisasContext;
87

    
88
// This function uses non-native bit order
89
#define GET_FIELD(X, FROM, TO)                                  \
90
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
91

    
92
// This function uses the order in the manuals, i.e. bit 0 is 2^0
93
#define GET_FIELD_SP(X, FROM, TO)               \
94
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
95

    
96
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
97
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
98

    
99
#ifdef TARGET_SPARC64
100
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
101
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
102
#else
103
#define DFPREG(r) (r & 0x1e)
104
#define QFPREG(r) (r & 0x1c)
105
#endif
106

    
107
#define UA2005_HTRAP_MASK 0xff
108
#define V8_TRAP_MASK 0x7f
109

    
110
/* Sign-extend the low LEN bits of X to a full 32-bit signed value.
   The shift is performed on an unsigned value to avoid the undefined
   behaviour of left-shifting a negative int (C99 6.5.7); the final
   arithmetic right shift propagates the sign bit.  */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;

    return (int)((unsigned int)x << shift) >> shift;
}
115

    
116
#define IS_IMM (insn & (1<<13))
117

    
118
/* floating point registers moves */
119
static void gen_op_load_fpr_DT0(unsigned int src)
120
{
121
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
122
                   offsetof(CPU_DoubleU, l.upper));
123
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
124
                   offsetof(CPU_DoubleU, l.lower));
125
}
126

    
127
static void gen_op_load_fpr_DT1(unsigned int src)
128
{
129
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
130
                   offsetof(CPU_DoubleU, l.upper));
131
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
132
                   offsetof(CPU_DoubleU, l.lower));
133
}
134

    
135
static void gen_op_store_DT0_fpr(unsigned int dst)
136
{
137
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
138
                   offsetof(CPU_DoubleU, l.upper));
139
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
140
                   offsetof(CPU_DoubleU, l.lower));
141
}
142

    
143
static void gen_op_load_fpr_QT0(unsigned int src)
144
{
145
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
146
                   offsetof(CPU_QuadU, l.upmost));
147
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
148
                   offsetof(CPU_QuadU, l.upper));
149
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
150
                   offsetof(CPU_QuadU, l.lower));
151
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
152
                   offsetof(CPU_QuadU, l.lowest));
153
}
154

    
155
static void gen_op_load_fpr_QT1(unsigned int src)
156
{
157
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
158
                   offsetof(CPU_QuadU, l.upmost));
159
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
160
                   offsetof(CPU_QuadU, l.upper));
161
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
162
                   offsetof(CPU_QuadU, l.lower));
163
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
164
                   offsetof(CPU_QuadU, l.lowest));
165
}
166

    
167
static void gen_op_store_QT0_fpr(unsigned int dst)
168
{
169
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
170
                   offsetof(CPU_QuadU, l.upmost));
171
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
172
                   offsetof(CPU_QuadU, l.upper));
173
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
174
                   offsetof(CPU_QuadU, l.lower));
175
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
176
                   offsetof(CPU_QuadU, l.lowest));
177
}
178

    
179
/* moves */
180
#ifdef CONFIG_USER_ONLY
181
#define supervisor(dc) 0
182
#ifdef TARGET_SPARC64
183
#define hypervisor(dc) 0
184
#endif
185
#else
186
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
187
#ifdef TARGET_SPARC64
188
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
189
#else
190
#endif
191
#endif
192

    
193
#ifdef TARGET_SPARC64
194
#ifndef TARGET_ABI32
195
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
196
#else
197
#define AM_CHECK(dc) (1)
198
#endif
199
#endif
200

    
201
/* On sparc64, truncate ADDR to 32 bits when PSTATE.AM (or the 32-bit
   ABI) requires it; a no-op on sparc32 builds.  */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
#endif
}
208

    
209
/* Load guest register REG into TN.  %g0 is hardwired to zero, the
   other globals live in TCG globals, and window registers are read
   through the register-window pointer.  */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0) {
        tcg_gen_movi_tl(tn, 0);
        return;
    }
    if (reg < 8) {
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
        return;
    }
    tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
}
219

    
220
/* Store TN into guest register REG.  Writes to %g0 are discarded;
   window registers go through the register-window pointer.  */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0) {
        return;
    }
    if (reg < 8) {
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
        return;
    }
    tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
}
230

    
231
/* Emit a jump to (PC, NPC), chaining directly to the next TB when both
   targets stay on the same guest page as this TB and we are not
   single-stepping; otherwise fall back to an unchained exit.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb = s->tb;

    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* Same page: direct chaining is safe.  */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* Cross-page jump: return to the main loop without chaining.  */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
252

    
253
// XXX suboptimal
254
/* Extract the PSR negative (N) flag from SRC into REG as 0 or 1.
   XXX suboptimal: shift+mask per flag.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 1);
}

/* Extract the PSR zero (Z) flag from SRC into REG as 0 or 1.  */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 1);
}

/* Extract the PSR overflow (V) flag from SRC into REG as 0 or 1.  */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 1);
}

/* Extract the PSR carry (C) flag from SRC into REG as 0 or 1.  */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 1);
}
281

    
282
/* Raise TT_TOVF if the 32-bit signed addition DST = SRC1 + SRC2
   overflowed.  Overflow occurred when both operands have the same
   sign but the result's sign differs from SRC1's.  */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv ovf;
    TCGv_i32 trapno;
    int lbl_ok;

    lbl_ok = gen_new_label();

    ovf = tcg_temp_new();
    /* ~(src1 ^ src2) & (src1 ^ dst): sign bit set iff overflow.  */
    tcg_gen_xor_tl(ovf, src1, src2);
    tcg_gen_not_tl(ovf, ovf);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(ovf, ovf, cpu_tmp0);
    tcg_gen_andi_tl(ovf, ovf, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, ovf, 0, lbl_ok);
    trapno = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(trapno);
    tcg_temp_free_i32(trapno);
    gen_set_label(lbl_ok);
    tcg_temp_free(ovf);
}
303

    
304
/* Raise TT_TOVF if either operand of a tagged add/sub has a nonzero
   tag (i.e. either of its low two bits set).  */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int lbl_ok;
    TCGv_i32 trapno;

    lbl_ok = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, lbl_ok);
    trapno = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(trapno);
    tcg_temp_free_i32(trapno);
    gen_set_label(lbl_ok);
}
318

    
319
/* DST = SRC1 + immediate SRC2, latching operands and result into the
   cc_* globals for lazy condition-code evaluation.  */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
326

    
327
/* DST = SRC1 + SRC2, latching operands and result into the cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
334

    
335
static TCGv_i32 gen_add32_carry32(void)
336
{
337
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
338

    
339
    /* Carry is computed from a previous add: (dst < src)  */
340
#if TARGET_LONG_BITS == 64
341
    cc_src1_32 = tcg_temp_new_i32();
342
    cc_src2_32 = tcg_temp_new_i32();
343
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
344
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
345
#else
346
    cc_src1_32 = cpu_cc_dst;
347
    cc_src2_32 = cpu_cc_src;
348
#endif
349

    
350
    carry_32 = tcg_temp_new_i32();
351
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
352

    
353
#if TARGET_LONG_BITS == 64
354
    tcg_temp_free_i32(cc_src1_32);
355
    tcg_temp_free_i32(cc_src2_32);
356
#endif
357

    
358
    return carry_32;
359
}
360

    
361
static TCGv_i32 gen_sub32_carry32(void)
362
{
363
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
364

    
365
    /* Carry is computed from a previous borrow: (src1 < src2)  */
366
#if TARGET_LONG_BITS == 64
367
    cc_src1_32 = tcg_temp_new_i32();
368
    cc_src2_32 = tcg_temp_new_i32();
369
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
370
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
371
#else
372
    cc_src1_32 = cpu_cc_src;
373
    cc_src2_32 = cpu_cc_src2;
374
#endif
375

    
376
    carry_32 = tcg_temp_new_i32();
377
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
378

    
379
#if TARGET_LONG_BITS == 64
380
    tcg_temp_free_i32(cc_src1_32);
381
    tcg_temp_free_i32(cc_src2_32);
382
#endif
383

    
384
    return carry_32;
385
}
386

    
387
/* DST = SRC1 + SRC2 + carry (ADDX/ADDXcc).  The carry source depends
   on how the current condition codes were produced; when UPDATE_CC is
   set, latch operands/result and switch to CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 c32;
    TCGv c;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* These ops leave carry clear, so a plain ADD suffices.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* On 32-bit hosts, recompute the carry with the host's
               ADD2 opcode and feed it straight into the sum; the low
               half of the double-word result is discarded.  */
            TCGv discard = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, discard, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(discard);
            goto add_done;
        }
#endif
        c32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        c32 = gen_sub32_carry32();
        break;

    default:
        /* Unknown cc state: ask the helper for the icc carry.  */
        c32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(c32);
        break;
    }

#if TARGET_LONG_BITS == 64
    c = tcg_temp_new();
    tcg_gen_extu_i32_i64(c, c32);
#else
    c = c32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, c);

    tcg_temp_free_i32(c32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(c);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
462

    
463
/* Tagged add with condition codes: identical emission to a plain
   add-with-cc; tag/overflow checking is left to the cc evaluation.  */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Tagged add, trapping variant (TADDccTV): trap on a nonzero tag
   before the add and on signed overflow after it.  */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
480

    
481
/* Raise TT_TOVF if the 32-bit signed subtraction DST = SRC1 - SRC2
   overflowed: the operands' signs differ and the result's sign
   differs from SRC1's.  */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv ovf;
    TCGv_i32 trapno;
    int lbl_ok;

    lbl_ok = gen_new_label();

    ovf = tcg_temp_new();
    /* (src1 ^ src2) & (src1 ^ dst): sign bit set iff overflow.  */
    tcg_gen_xor_tl(ovf, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(ovf, ovf, cpu_tmp0);
    tcg_gen_andi_tl(ovf, ovf, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, ovf, 0, lbl_ok);
    trapno = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(trapno);
    tcg_temp_free_i32(trapno);
    gen_set_label(lbl_ok);
    tcg_temp_free(ovf);
}
501

    
502
/* DST = SRC1 - immediate SRC2 with condition codes.  Subtracting zero
   produces the same NZ flags as a logic op with C = V = 0, so the
   cheaper CC_OP_LOGIC state can be used for that case.  */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
517

    
518
/* DST = SRC1 - SRC2, latching operands and result into the cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
525

    
526
/* DST = SRC1 - SRC2 - carry (SUBX/SUBXcc).  Mirror image of
   gen_op_addx_int: the borrow source depends on the current cc state;
   when UPDATE_CC is set, latch operands/result and switch to
   CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 c32;
    TCGv c;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* These ops leave carry clear, so a plain SUB suffices.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        c32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* On 32-bit hosts, recompute the borrow with the host's
               SUB2 opcode and feed it straight into the difference;
               the low half of the double-word result is discarded.  */
            TCGv discard = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, discard, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(discard);
            goto sub_done;
        }
#endif
        c32 = gen_sub32_carry32();
        break;

    default:
        /* Unknown cc state: ask the helper for the icc carry.  */
        c32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(c32);
        break;
    }

#if TARGET_LONG_BITS == 64
    c = tcg_temp_new();
    tcg_gen_extu_i32_i64(c, c32);
#else
    c = c32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, c);

    tcg_temp_free_i32(c32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(c);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
601

    
602
/* Tagged subtract with condition codes: identical emission to a plain
   sub-with-cc; tag/overflow checking is left to the cc evaluation.  */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Tagged subtract, trapping variant (TSUBccTV): trap on a nonzero tag
   before the subtract and on signed overflow after it.  */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
619

    
620
/* One step of the SPARC MULScc multiply-step instruction:
   conditionally zero the multiplicand on Y's LSB, shift the LSB of
   SRC1 into Y, shift (N ^ V) into SRC1, then add and latch cc.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv t;
    int lbl;

    lbl = gen_new_label();
    t = tcg_temp_new();

    /* If Y's LSB is clear, the effective second operand is zero.  */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(t, cpu_y, 1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lbl);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(lbl);

    /* y = (src1 & 1) << 31 | (y >> 1), kept to 32 bits.  */
    tcg_gen_andi_tl(t, cpu_cc_src, 1);
    tcg_gen_shli_tl(t, t, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, t);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    /* b1 = N ^ V from the current PSR.  */
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(t, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, t);
    tcg_temp_free(t);

    /* src1 = (b1 << 31) | (src1 >> 1).  */
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
664

    
665
/* 32x32 -> 64 multiply: DST gets the full product (low 32 bits
   architecturally significant), Y gets the high 32 bits.  SIGN_EXT
   selects signed vs unsigned extension of the truncated operands.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 op1_32, op2_32;
    TCGv_i64 prod_a, prod_b;

    op1_32 = tcg_temp_new_i32();
    op2_32 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(op1_32, src1);
    tcg_gen_trunc_tl_i32(op2_32, src2);

    prod_a = tcg_temp_new_i64();
    prod_b = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(prod_a, op2_32);
        tcg_gen_ext_i32_i64(prod_b, op1_32);
    } else {
        tcg_gen_extu_i32_i64(prod_a, op2_32);
        tcg_gen_extu_i32_i64(prod_b, op1_32);
    }

    tcg_gen_mul_i64(prod_b, prod_a, prod_b);

    /* High half of the product goes to %y (kept to 32 bits).  */
    tcg_gen_shri_i64(prod_a, prod_b, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, prod_a);
    tcg_temp_free_i64(prod_a);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, prod_b);

    tcg_temp_free_i64(prod_b);

    tcg_temp_free_i32(op1_32);
    tcg_temp_free_i32(op2_32);
}
701

    
702
/* UMUL: unsigned 32x32 multiply (operands zero-extended).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply (operands sign-extended).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    gen_op_multiply(dst, src1, src2, 1);
}
713

    
714
#ifdef TARGET_SPARC64
715
/* Raise TT_DIV_ZERO when DIVISOR is zero; otherwise fall through.  */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 trapno;
    int lbl_ok;

    lbl_ok = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, lbl_ok);
    trapno = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(trapno);
    tcg_temp_free_i32(trapno);
    gen_set_label(lbl_ok);
}
727

    
728
/* SDIVX: 64-bit signed divide.  Traps on a zero divisor; the single
   overflowing case INT64_MIN / -1 yields INT64_MIN rather than
   executing the (undefined) host division.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int lbl_div, lbl_done;

    lbl_div = gen_new_label();
    lbl_done = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, lbl_div);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, lbl_div);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(lbl_done);
    gen_set_label(lbl_div);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(lbl_done);
}
745
#endif
746

    
747
// 1
748
static inline void gen_op_eval_ba(TCGv dst)
749
{
750
    tcg_gen_movi_tl(dst, 1);
751
}
752

    
753
// Z
754
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
755
{
756
    gen_mov_reg_Z(dst, src);
757
}
758

    
759
// Z | (N ^ V)
760
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
761
{
762
    gen_mov_reg_N(cpu_tmp0, src);
763
    gen_mov_reg_V(dst, src);
764
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
765
    gen_mov_reg_Z(cpu_tmp0, src);
766
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
767
}
768

    
769
// N ^ V
770
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
771
{
772
    gen_mov_reg_V(cpu_tmp0, src);
773
    gen_mov_reg_N(dst, src);
774
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
775
}
776

    
777
// C | Z
778
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
779
{
780
    gen_mov_reg_Z(cpu_tmp0, src);
781
    gen_mov_reg_C(dst, src);
782
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
783
}
784

    
785
// C
786
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
787
{
788
    gen_mov_reg_C(dst, src);
789
}
790

    
791
// V
792
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
793
{
794
    gen_mov_reg_V(dst, src);
795
}
796

    
797
// 0
798
static inline void gen_op_eval_bn(TCGv dst)
799
{
800
    tcg_gen_movi_tl(dst, 0);
801
}
802

    
803
// N
804
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
805
{
806
    gen_mov_reg_N(dst, src);
807
}
808

    
809
// !Z
810
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
811
{
812
    gen_mov_reg_Z(dst, src);
813
    tcg_gen_xori_tl(dst, dst, 0x1);
814
}
815

    
816
// !(Z | (N ^ V))
817
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
818
{
819
    gen_mov_reg_N(cpu_tmp0, src);
820
    gen_mov_reg_V(dst, src);
821
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
822
    gen_mov_reg_Z(cpu_tmp0, src);
823
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
824
    tcg_gen_xori_tl(dst, dst, 0x1);
825
}
826

    
827
// !(N ^ V)
828
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
829
{
830
    gen_mov_reg_V(cpu_tmp0, src);
831
    gen_mov_reg_N(dst, src);
832
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
833
    tcg_gen_xori_tl(dst, dst, 0x1);
834
}
835

    
836
// !(C | Z)
837
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
838
{
839
    gen_mov_reg_Z(cpu_tmp0, src);
840
    gen_mov_reg_C(dst, src);
841
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
842
    tcg_gen_xori_tl(dst, dst, 0x1);
843
}
844

    
845
// !C
846
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
847
{
848
    gen_mov_reg_C(dst, src);
849
    tcg_gen_xori_tl(dst, dst, 0x1);
850
}
851

    
852
// !N
853
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
854
{
855
    gen_mov_reg_N(dst, src);
856
    tcg_gen_xori_tl(dst, dst, 0x1);
857
}
858

    
859
// !V
860
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
861
{
862
    gen_mov_reg_V(dst, src);
863
    tcg_gen_xori_tl(dst, dst, 0x1);
864
}
865

    
866
/*
867
  FPSR bit field FCC1 | FCC0:
868
   0 =
869
   1 <
870
   2 >
871
   3 unordered
872
*/
873
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
874
                                    unsigned int fcc_offset)
875
{
876
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
877
    tcg_gen_andi_tl(reg, reg, 0x1);
878
}
879

    
880
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
881
                                    unsigned int fcc_offset)
882
{
883
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
884
    tcg_gen_andi_tl(reg, reg, 0x1);
885
}
886

    
887
// !0: FCC0 | FCC1
888
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
889
                                    unsigned int fcc_offset)
890
{
891
    gen_mov_reg_FCC0(dst, src, fcc_offset);
892
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
893
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
894
}
895

    
896
// 1 or 2: FCC0 ^ FCC1
897
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
898
                                    unsigned int fcc_offset)
899
{
900
    gen_mov_reg_FCC0(dst, src, fcc_offset);
901
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
902
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
903
}
904

    
905
// 1 or 3: FCC0
906
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
907
                                    unsigned int fcc_offset)
908
{
909
    gen_mov_reg_FCC0(dst, src, fcc_offset);
910
}
911

    
912
// 1: FCC0 & !FCC1
913
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
914
                                    unsigned int fcc_offset)
915
{
916
    gen_mov_reg_FCC0(dst, src, fcc_offset);
917
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
918
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
919
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
920
}
921

    
922
// 2 or 3: FCC1
923
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
924
                                    unsigned int fcc_offset)
925
{
926
    gen_mov_reg_FCC1(dst, src, fcc_offset);
927
}
928

    
929
// 2: !FCC0 & FCC1
930
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
931
                                    unsigned int fcc_offset)
932
{
933
    gen_mov_reg_FCC0(dst, src, fcc_offset);
934
    tcg_gen_xori_tl(dst, dst, 0x1);
935
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
936
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
937
}
938

    
939
// 3: FCC0 & FCC1
940
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
941
                                    unsigned int fcc_offset)
942
{
943
    gen_mov_reg_FCC0(dst, src, fcc_offset);
944
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
945
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
946
}
947

    
948
// 0: !(FCC0 | FCC1)
949
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
950
                                    unsigned int fcc_offset)
951
{
952
    gen_mov_reg_FCC0(dst, src, fcc_offset);
953
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
954
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
955
    tcg_gen_xori_tl(dst, dst, 0x1);
956
}
957

    
958
// 0 or 3: !(FCC0 ^ FCC1)
959
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
960
                                    unsigned int fcc_offset)
961
{
962
    gen_mov_reg_FCC0(dst, src, fcc_offset);
963
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
964
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
965
    tcg_gen_xori_tl(dst, dst, 0x1);
966
}
967

    
968
// 0 or 2: !FCC0
969
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
970
                                    unsigned int fcc_offset)
971
{
972
    gen_mov_reg_FCC0(dst, src, fcc_offset);
973
    tcg_gen_xori_tl(dst, dst, 0x1);
974
}
975

    
976
// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    /* dst = !(FCC0 & !FCC1) — true unless the fcc field equals 1
       ("unordered, greater or equal").  */
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
986

    
987
// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    /* dst = !FCC1 — true when the fcc field is 0 or 1
       ("less or equal").  */
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
994

    
995
// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    /* dst = !(!FCC0 & FCC1) — true unless the fcc field equals 2
       ("unordered or less or equal").  */
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1005

    
1006
// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    /* dst = !(FCC0 & FCC1) — true unless the fcc field equals 3
       ("ordered").  */
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1015

    
1016
/* Emit a two-way TB exit on r_cond: when the condition is non-zero,
   control continues at pc1/pc1+4; otherwise it continues at pc2/pc2+4.
   Both exits chain through gen_goto_tb.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int lbl_false = gen_new_label();

    /* condition == 0: skip to the second target */
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, lbl_false);
    /* condition != 0: first target */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(lbl_false);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
1030

    
1031
/* Emit the exits for a branch with the annul bit set: when r_cond is
   non-zero the branch is taken (pc2 -> pc1); when it is zero the delay
   slot is annulled and execution resumes at pc2+4/pc2+8.  */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int lbl_untaken = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, lbl_untaken);
    /* taken: execute the delay slot at pc2, then jump to pc1 */
    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(lbl_untaken);
    /* untaken + annul: skip the delay slot entirely */
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
1045

    
1046
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1047
                                      TCGv r_cond)
1048
{
1049
    int l1, l2;
1050

    
1051
    l1 = gen_new_label();
1052
    l2 = gen_new_label();
1053

    
1054
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1055

    
1056
    tcg_gen_movi_tl(cpu_npc, npc1);
1057
    tcg_gen_br(l2);
1058

    
1059
    gen_set_label(l1);
1060
    tcg_gen_movi_tl(cpu_npc, npc2);
1061
    gen_set_label(l2);
1062
}
1063

    
1064
/* call this function before using the condition register as it may
1065
   have been set for a jump */
1066
static inline void flush_cond(DisasContext *dc, TCGv cond)
1067
{
1068
    if (dc->npc == JUMP_PC) {
1069
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1070
        dc->npc = DYNAMIC_PC;
1071
    }
1072
}
1073

    
1074
/* Make cpu_npc hold the correct next-pc value: resolve a pending
   conditional (JUMP_PC) or store a statically known npc.  When npc is
   already DYNAMIC_PC, cpu_npc is already up to date and nothing is
   emitted.  */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
        return;
    }
    if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1083

    
1084
/* Synchronise the full translator-visible CPU state (pc, condition
   codes, npc) so a helper or exception may safely inspect it.  */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
1094

    
1095
/* Advance pc to npc, handling all three npc representations:
   - JUMP_PC: resolve the pending conditional first, then pc becomes
     dynamic;
   - DYNAMIC_PC: copy the runtime cpu_npc into cpu_pc;
   - static: just propagate the compile-time value.  */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1108

    
1109
/* Emit the sequential advance: pc <- npc, npc <- npc + 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1114

    
1115
/* Evaluate an integer condition code test into r_dst (0 or 1).
   cc selects the condition-code register (TARGET_SPARC64: 0 = icc/psr,
   non-zero = xcc); cond is the 4-bit branch condition field.  The
   lazy flags are forced to CC_OP_FLAGS first so psr/xcc are valid.  */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* Materialise the flags if they are still in lazy form.  */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    /* Dispatch on the SPARC Bicc condition encoding.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1187

    
1188
/* Evaluate a floating-point condition code test into r_dst (0 or 1).
   cc selects which fcc field of the FSR to test; the computed offset
   is the bit distance of that field from fcc0 (fcc1..fcc3 live at FSR
   bits 32+ on SPARC64 — presumably consumed by gen_mov_reg_FCC0/1;
   confirm against those helpers).  cond is the 4-bit FBfcc condition.  */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1259

    
1260
#ifdef TARGET_SPARC64
1261
// Inverted logic
/* Map the 3-bit BPr register-condition field to the TCG condition
   that makes the branch *skip* (gen_cond_reg branches on the inverse
   and falls through to set r_dst = 1).  Entries 0 and 4 are reserved
   encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1272

    
1273
/* Evaluate a V9 register condition (BPr/MOVr encoding) into r_dst:
   r_dst = 1 when the condition holds for r_src, else 0.  The table
   holds the inverted condition, so the branch skips the "set to 1"
   store when the test fails.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int lbl_done = gen_new_label();

    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, lbl_done);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(lbl_done);
}
1283
#endif
1284

    
1285
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  offset is the
   sign-extended, shifted displacement; a is the annul bit.  Special
   cases: cond 0 = branch never (bn), cond 8 = branch always (ba);
   everything else records a two-way JUMP_PC state or, with annul,
   ends the TB via gen_branch_a.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled ba: jump straight to the target */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional: materialise the condition value first */
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the decision: npc is one of two known values */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1325

    
1326
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Mirrors do_branch exactly, but the condition comes from the FSR's
   fcc field (via gen_fcond) instead of the integer condition codes.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1366

    
1367
#ifdef TARGET_SPARC64
1368
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-condition (BPr).  Unlike
   do_branch there are no "never"/"always" encodings, so the condition
   is always evaluated from r_reg via gen_cond_reg.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer: npc is one of two known values (JUMP_PC state) */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1387

    
1388
/* Single-precision FP compare, dispatched to the helper for the
   selected fcc field (V9 has four).  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}
1405

    
1406
/* Double-precision FP compare; operands are implicit (DT0/DT1 in the
   helpers), only the target fcc field is selected here.  */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}
1423

    
1424
/* Quad-precision FP compare; operands are implicit in the helpers,
   only the target fcc field is selected here.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
1441

    
1442
/* Single-precision FP compare-with-exception (FCMPEs): like
   gen_op_fcmps but the helper signals on unordered operands.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}
1459

    
1460
/* Double-precision FP compare-with-exception (FCMPEd), per fcc.  */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
1477

    
1478
/* Quad-precision FP compare-with-exception (FCMPEq), per fcc.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1495

    
1496
#else
1497

    
1498
/* SPARC32: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}
1502

    
1503
/* SPARC32 double compare; fccno ignored (single fcc field).  */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
1507

    
1508
/* SPARC32 quad compare; fccno ignored (single fcc field).  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1512

    
1513
/* SPARC32 single compare-with-exception; fccno ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}
1517

    
1518
/* SPARC32 double compare-with-exception; fccno ignored.  */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
1522

    
1523
/* SPARC32 quad compare-with-exception; fccno ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1527
#endif
1528

    
1529
/* Record fsr_flags in the FSR FTT field and raise a TT_FP_EXCP trap.
   The previous FTT bits are cleared first.  */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
1539

    
1540
/* If the FPU is disabled (system emulation only), emit a
   TT_NFPU_INSN trap and end the TB.  Returns 1 when the trap was
   emitted (caller must skip the insn), 0 otherwise.  In user mode the
   FPU is always available and this is a no-op returning 0.  */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* expose pc/npc before raising the exception */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1556

    
1557
/* Clear the FSR's current-exception (cexc) and trap-type (FTT)
   fields before a new FP operation.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1561

    
1562
/* Reset the softfloat accumulated-exception state via its helper.  */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1566

    
1567
/* asi moves */
1568
#ifdef TARGET_SPARC64
1569
/* Determine the ASI for an alternate-space access.  With the i bit
   set the ASI comes from the %asi register at runtime; otherwise it
   is the immediate asi field of the instruction.  Returns a fresh
   TCGv_i32 the caller must free.  */
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    TCGv_i32 r_asi;

    if (IS_IMM) {
        /* runtime ASI from the %asi register */
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        /* ASI encoded in the instruction */
        r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    }
    return r_asi;
}
1583

    
1584
/* SPARC64: load `size` bytes from an alternate address space into
   dst, sign-extending when `sign` is non-zero.  */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1597

    
1598
/* SPARC64: store `size` bytes of src to an alternate address space.  */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1608

    
1609
/* SPARC64: FP load from an alternate address space; the helper
   writes directly into FP register rd.  */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1621

    
1622
/* SPARC64: FP store to an alternate address space; the helper reads
   FP register rd directly.  */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1634

    
1635
/* SPARC64 SWAPA: atomically (at translation level: load-then-store)
   exchange the 32-bit word at addr with dst.  The old memory value is
   read into cpu_tmp64, dst is stored, then dst receives the old
   value.  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1649

    
1650
/* SPARC64 LDDA: the helper performs the doubleword load and writes
   the register pair itself; `hi` is unused here but kept for
   signature symmetry with the 32-bit variant below.  */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
1660

    
1661
/* SPARC64 STDA: store the even/odd register pair (hi = rd, low word
   = rd+1) as a single 64-bit value to the alternate space.  */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1673

    
1674
/* SPARC64 CASA: 32-bit compare-and-swap in an alternate space.
   Compares the word at addr with register rd; on match stores val2.
   The helper returns the old memory value in dst.  */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
1687

    
1688
/* SPARC64 CASXA: 64-bit compare-and-swap in an alternate space.
   Compare value comes from register rd (loaded into cpu_tmp64); the
   helper returns the old memory value in dst.  */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1698

    
1699
#elif !defined(CONFIG_USER_ONLY)
1700

    
1701
/* SPARC32 system mode: load `size` bytes from the address space
   selected by the instruction's immediate ASI field into dst,
   sign-extending when `sign` is non-zero.  The helper produces a
   64-bit result in cpu_tmp64 which is truncated to target width.  */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    /* These are TCGv_i32 temporaries: free them with the matching
       typed variant (as the TARGET_SPARC64 versions do), not the
       target_ulong-typed tcg_temp_free().  */
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1715

    
1716
/* SPARC32 system mode: store `size` bytes of src to the address
   space selected by the instruction's immediate ASI field.  */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    /* Free the TCGv_i32 temporaries with the typed variant (matches
       the TARGET_SPARC64 versions; tcg_temp_free() expects TCGv).  */
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1727

    
1728
/* SPARC32 system mode SWAPA: exchange the 32-bit word at addr with
   dst (load old value into cpu_tmp64, store dst, return old value in
   dst).  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    /* Free TCGv_i32 temporaries with the typed variant (matches the
       TARGET_SPARC64 versions; tcg_temp_free() expects TCGv).  */
    tcg_temp_free_i32(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1746

    
1747
/* SPARC32 system mode LDDA: load a 64-bit doubleword from the
   alternate space and split it into the register pair — high word to
   rd, low word to rd+1.  */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    /* Free TCGv_i32 temporaries with the typed variant (matches the
       TARGET_SPARC64 versions; tcg_temp_free() expects TCGv).  */
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
1764

    
1765
/* SPARC32 system mode STDA: store the register pair (hi = rd, low
   word = rd+1) as one 64-bit value to the alternate space.  */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    /* Free TCGv_i32 temporaries with the typed variant (matches the
       TARGET_SPARC64 versions; tcg_temp_free() expects TCGv).  */
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1777
#endif
1778

    
1779
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1780
/* LDSTUBA: load the byte at addr into dst, then store 0xff to the
   same location in the alternate space (atomic at guest level).  */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
1795
#endif
1796

    
1797
/* Fetch the rs1 operand.  %g0 reads as zero, %g1-%g7 are the global
   TCG registers, and windowed registers are loaded from regwptr into
   the caller-provided scratch `def`.  Returns the TCGv to use (either
   a global or `def`).  */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);

    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);            /* %g0 is hard-wired to 0 */
        return def;
    }
    if (rs1 < 8) {
        return cpu_gregs[rs1];              /* global register */
    }
    /* windowed register: load from the register-window pointer */
    tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return def;
}
1812

    
1813
/* Fetch the second operand: either the sign-extended 13-bit
   immediate (i bit set) or register rs2, using the same %g0/global/
   windowed scheme as get_src1.  Returns the TCGv to use.  */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    if (IS_IMM) {
        /* immediate operand */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
        return def;
    }

    /* register operand */
    {
        unsigned int rs2 = GET_FIELD(insn, 27, 31);

        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);        /* %g0 is hard-wired to 0 */
            return def;
        }
        if (rs2 < 8) {
            return cpu_gregs[rs2];          /* global register */
        }
        tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        return def;
    }
}
1832

    
1833
#ifdef TARGET_SPARC64
1834
/* Compute a pointer to the current trap-state entry:
   r_tsptr = &env->ts[env->tl & MAXTL_MASK].
   NOTE(review): the parameter deliberately(?) shadows the file-scope
   `cpu_env` global; also the ts offset uses offsetof(CPUState, ...)
   while tl uses CPUSPARCState — presumably aliases, worth confirming.  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
1858
#endif
1859

    
1860
/* Bail out of disas_sparc_insn (via its illegal_insn / nfpu_insn
   labels) when the CPU model lacks the required IU or FPU feature;
   usable only inside that function.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1866

    
1867
/* before an instruction, dc->pc must be static */
1868
static void disas_sparc_insn(DisasContext * dc)
1869
{
1870
    unsigned int insn, opc, rs1, rs2, rd;
1871
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1872
    target_long simm;
1873

    
1874
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1875
        tcg_gen_debug_insn_start(dc->pc);
1876
    insn = ldl_code(dc->pc);
1877
    opc = GET_FIELD(insn, 0, 1);
1878

    
1879
    rd = GET_FIELD(insn, 2, 6);
1880

    
1881
    cpu_tmp1 = cpu_src1 = tcg_temp_new();
1882
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
1883

    
1884
    switch (opc) {
1885
    case 0:                     /* branches/sethi */
1886
        {
1887
            unsigned int xop = GET_FIELD(insn, 7, 9);
1888
            int32_t target;
1889
            switch (xop) {
1890
#ifdef TARGET_SPARC64
1891
            case 0x1:           /* V9 BPcc */
1892
                {
1893
                    int cc;
1894

    
1895
                    target = GET_FIELD_SP(insn, 0, 18);
1896
                    target = sign_extend(target, 19);
1897
                    target <<= 2;
1898
                    cc = GET_FIELD_SP(insn, 20, 21);
1899
                    if (cc == 0)
1900
                        do_branch(dc, target, insn, 0, cpu_cond);
1901
                    else if (cc == 2)
1902
                        do_branch(dc, target, insn, 1, cpu_cond);
1903
                    else
1904
                        goto illegal_insn;
1905
                    goto jmp_insn;
1906
                }
1907
            case 0x3:           /* V9 BPr */
1908
                {
1909
                    target = GET_FIELD_SP(insn, 0, 13) |
1910
                        (GET_FIELD_SP(insn, 20, 21) << 14);
1911
                    target = sign_extend(target, 16);
1912
                    target <<= 2;
1913
                    cpu_src1 = get_src1(insn, cpu_src1);
1914
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1915
                    goto jmp_insn;
1916
                }
1917
            case 0x5:           /* V9 FBPcc */
1918
                {
1919
                    int cc = GET_FIELD_SP(insn, 20, 21);
1920
                    if (gen_trap_ifnofpu(dc, cpu_cond))
1921
                        goto jmp_insn;
1922
                    target = GET_FIELD_SP(insn, 0, 18);
1923
                    target = sign_extend(target, 19);
1924
                    target <<= 2;
1925
                    do_fbranch(dc, target, insn, cc, cpu_cond);
1926
                    goto jmp_insn;
1927
                }
1928
#else
1929
            case 0x7:           /* CBN+x */
1930
                {
1931
                    goto ncp_insn;
1932
                }
1933
#endif
1934
            case 0x2:           /* BN+x */
1935
                {
1936
                    target = GET_FIELD(insn, 10, 31);
1937
                    target = sign_extend(target, 22);
1938
                    target <<= 2;
1939
                    do_branch(dc, target, insn, 0, cpu_cond);
1940
                    goto jmp_insn;
1941
                }
1942
            case 0x6:           /* FBN+x */
1943
                {
1944
                    if (gen_trap_ifnofpu(dc, cpu_cond))
1945
                        goto jmp_insn;
1946
                    target = GET_FIELD(insn, 10, 31);
1947
                    target = sign_extend(target, 22);
1948
                    target <<= 2;
1949
                    do_fbranch(dc, target, insn, 0, cpu_cond);
1950
                    goto jmp_insn;
1951
                }
1952
            case 0x4:           /* SETHI */
1953
                if (rd) { // nop
1954
                    uint32_t value = GET_FIELD(insn, 10, 31);
1955
                    TCGv r_const;
1956

    
1957
                    r_const = tcg_const_tl(value << 10);
1958
                    gen_movl_TN_reg(rd, r_const);
1959
                    tcg_temp_free(r_const);
1960
                }
1961
                break;
1962
            case 0x0:           /* UNIMPL */
1963
            default:
1964
                goto illegal_insn;
1965
            }
1966
            break;
1967
        }
1968
        break;
1969
    case 1:                     /*CALL*/
1970
        {
1971
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
1972
            TCGv r_const;
1973

    
1974
            r_const = tcg_const_tl(dc->pc);
1975
            gen_movl_TN_reg(15, r_const);
1976
            tcg_temp_free(r_const);
1977
            target += dc->pc;
1978
            gen_mov_pc_npc(dc, cpu_cond);
1979
            dc->npc = target;
1980
        }
1981
        goto jmp_insn;
1982
    case 2:                     /* FPU & Logical Operations */
1983
        {
1984
            unsigned int xop = GET_FIELD(insn, 7, 12);
1985
            if (xop == 0x3a) {  /* generate trap */
1986
                int cond;
1987

    
1988
                cpu_src1 = get_src1(insn, cpu_src1);
1989
                if (IS_IMM) {
1990
                    rs2 = GET_FIELD(insn, 25, 31);
1991
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1992
                } else {
1993
                    rs2 = GET_FIELD(insn, 27, 31);
1994
                    if (rs2 != 0) {
1995
                        gen_movl_reg_TN(rs2, cpu_src2);
1996
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1997
                    } else
1998
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
1999
                }
2000

    
2001
                cond = GET_FIELD(insn, 3, 6);
2002
                if (cond == 0x8) { /* Trap Always */
2003
                    save_state(dc, cpu_cond);
2004
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
2005
                        supervisor(dc))
2006
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2007
                    else
2008
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2009
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2010
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2011

    
2012
                    if (rs2 == 0 &&
2013
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2014

    
2015
                        gen_helper_shutdown();
2016

    
2017
                    } else {
2018
                        gen_helper_raise_exception(cpu_tmp32);
2019
                    }
2020
                } else if (cond != 0) {
2021
                    TCGv r_cond = tcg_temp_new();
2022
                    int l1;
2023
#ifdef TARGET_SPARC64
2024
                    /* V9 icc/xcc */
2025
                    int cc = GET_FIELD_SP(insn, 11, 12);
2026

    
2027
                    save_state(dc, cpu_cond);
2028
                    if (cc == 0)
2029
                        gen_cond(r_cond, 0, cond, dc);
2030
                    else if (cc == 2)
2031
                        gen_cond(r_cond, 1, cond, dc);
2032
                    else
2033
                        goto illegal_insn;
2034
#else
2035
                    save_state(dc, cpu_cond);
2036
                    gen_cond(r_cond, 0, cond, dc);
2037
#endif
2038
                    l1 = gen_new_label();
2039
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2040

    
2041
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
2042
                        supervisor(dc))
2043
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2044
                    else
2045
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2046
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2047
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2048
                    gen_helper_raise_exception(cpu_tmp32);
2049

    
2050
                    gen_set_label(l1);
2051
                    tcg_temp_free(r_cond);
2052
                }
2053
                gen_op_next_insn();
2054
                tcg_gen_exit_tb(0);
2055
                dc->is_br = 1;
2056
                goto jmp_insn;
2057
            } else if (xop == 0x28) {
2058
                rs1 = GET_FIELD(insn, 13, 17);
2059
                switch(rs1) {
2060
                case 0: /* rdy */
2061
#ifndef TARGET_SPARC64
2062
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
2063
                                       manual, rdy on the microSPARC
2064
                                       II */
2065
                case 0x0f:          /* stbar in the SPARCv8 manual,
2066
                                       rdy on the microSPARC II */
2067
                case 0x10 ... 0x1f: /* implementation-dependent in the
2068
                                       SPARCv8 manual, rdy on the
2069
                                       microSPARC II */
2070
                    /* Read Asr17 */
2071
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2072
                        TCGv r_const;
2073

    
2074
                        /* Read Asr17 for a Leon3 monoprocessor */
2075
                        r_const = tcg_const_tl((1 << 8)
2076
                                               | (dc->def->nwindows - 1));
2077
                        gen_movl_TN_reg(rd, r_const);
2078
                        tcg_temp_free(r_const);
2079
                        break;
2080
                    }
2081
#endif
2082
                    gen_movl_TN_reg(rd, cpu_y);
2083
                    break;
2084
#ifdef TARGET_SPARC64
2085
                case 0x2: /* V9 rdccr */
2086
                    gen_helper_compute_psr();
2087
                    gen_helper_rdccr(cpu_dst);
2088
                    gen_movl_TN_reg(rd, cpu_dst);
2089
                    break;
2090
                case 0x3: /* V9 rdasi */
2091
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2092
                    gen_movl_TN_reg(rd, cpu_dst);
2093
                    break;
2094
                case 0x4: /* V9 rdtick */
2095
                    {
2096
                        TCGv_ptr r_tickptr;
2097

    
2098
                        r_tickptr = tcg_temp_new_ptr();
2099
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2100
                                       offsetof(CPUState, tick));
2101
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
2102
                        tcg_temp_free_ptr(r_tickptr);
2103
                        gen_movl_TN_reg(rd, cpu_dst);
2104
                    }
2105
                    break;
2106
                case 0x5: /* V9 rdpc */
2107
                    {
2108
                        TCGv r_const;
2109

    
2110
                        r_const = tcg_const_tl(dc->pc);
2111
                        gen_movl_TN_reg(rd, r_const);
2112
                        tcg_temp_free(r_const);
2113
                    }
2114
                    break;
2115
                case 0x6: /* V9 rdfprs */
2116
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2117
                    gen_movl_TN_reg(rd, cpu_dst);
2118
                    break;
2119
                case 0xf: /* V9 membar */
2120
                    break; /* no effect */
2121
                case 0x13: /* Graphics Status */
2122
                    if (gen_trap_ifnofpu(dc, cpu_cond))
2123
                        goto jmp_insn;
2124
                    gen_movl_TN_reg(rd, cpu_gsr);
2125
                    break;
2126
                case 0x16: /* Softint */
2127
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2128
                    gen_movl_TN_reg(rd, cpu_dst);
2129
                    break;
2130
                case 0x17: /* Tick compare */
2131
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
2132
                    break;
2133
                case 0x18: /* System tick */
2134
                    {
2135
                        TCGv_ptr r_tickptr;
2136

    
2137
                        r_tickptr = tcg_temp_new_ptr();
2138
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2139
                                       offsetof(CPUState, stick));
2140
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
2141
                        tcg_temp_free_ptr(r_tickptr);
2142
                        gen_movl_TN_reg(rd, cpu_dst);
2143
                    }
2144
                    break;
2145
                case 0x19: /* System tick compare */
2146
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
2147
                    break;
2148
                case 0x10: /* Performance Control */
2149
                case 0x11: /* Performance Instrumentation Counter */
2150
                case 0x12: /* Dispatch Control */
2151
                case 0x14: /* Softint set, WO */
2152
                case 0x15: /* Softint clear, WO */
2153
#endif
2154
                default:
2155
                    goto illegal_insn;
2156
                }
2157
#if !defined(CONFIG_USER_ONLY)
2158
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2159
#ifndef TARGET_SPARC64
2160
                if (!supervisor(dc))
2161
                    goto priv_insn;
2162
                gen_helper_compute_psr();
2163
                dc->cc_op = CC_OP_FLAGS;
2164
                gen_helper_rdpsr(cpu_dst);
2165
#else
2166
                CHECK_IU_FEATURE(dc, HYPV);
2167
                if (!hypervisor(dc))
2168
                    goto priv_insn;
2169
                rs1 = GET_FIELD(insn, 13, 17);
2170
                switch (rs1) {
2171
                case 0: // hpstate
2172
                    // gen_op_rdhpstate();
2173
                    break;
2174
                case 1: // htstate
2175
                    // gen_op_rdhtstate();
2176
                    break;
2177
                case 3: // hintp
2178
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2179
                    break;
2180
                case 5: // htba
2181
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
2182
                    break;
2183
                case 6: // hver
2184
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
2185
                    break;
2186
                case 31: // hstick_cmpr
2187
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2188
                    break;
2189
                default:
2190
                    goto illegal_insn;
2191
                }
2192
#endif
2193
                gen_movl_TN_reg(rd, cpu_dst);
2194
                break;
2195
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2196
                if (!supervisor(dc))
2197
                    goto priv_insn;
2198
#ifdef TARGET_SPARC64
2199
                rs1 = GET_FIELD(insn, 13, 17);
2200
                switch (rs1) {
2201
                case 0: // tpc
2202
                    {
2203
                        TCGv_ptr r_tsptr;
2204

    
2205
                        r_tsptr = tcg_temp_new_ptr();
2206
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2207
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2208
                                      offsetof(trap_state, tpc));
2209
                        tcg_temp_free_ptr(r_tsptr);
2210
                    }
2211
                    break;
2212
                case 1: // tnpc
2213
                    {
2214
                        TCGv_ptr r_tsptr;
2215

    
2216
                        r_tsptr = tcg_temp_new_ptr();
2217
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2218
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2219
                                      offsetof(trap_state, tnpc));
2220
                        tcg_temp_free_ptr(r_tsptr);
2221
                    }
2222
                    break;
2223
                case 2: // tstate
2224
                    {
2225
                        TCGv_ptr r_tsptr;
2226

    
2227
                        r_tsptr = tcg_temp_new_ptr();
2228
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2229
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2230
                                      offsetof(trap_state, tstate));
2231
                        tcg_temp_free_ptr(r_tsptr);
2232
                    }
2233
                    break;
2234
                case 3: // tt
2235
                    {
2236
                        TCGv_ptr r_tsptr;
2237

    
2238
                        r_tsptr = tcg_temp_new_ptr();
2239
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2240
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2241
                                       offsetof(trap_state, tt));
2242
                        tcg_temp_free_ptr(r_tsptr);
2243
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2244
                    }
2245
                    break;
2246
                case 4: // tick
2247
                    {
2248
                        TCGv_ptr r_tickptr;
2249

    
2250
                        r_tickptr = tcg_temp_new_ptr();
2251
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
2252
                                       offsetof(CPUState, tick));
2253
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2254
                        gen_movl_TN_reg(rd, cpu_tmp0);
2255
                        tcg_temp_free_ptr(r_tickptr);
2256
                    }
2257
                    break;
2258
                case 5: // tba
2259
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2260
                    break;
2261
                case 6: // pstate
2262
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2263
                                   offsetof(CPUSPARCState, pstate));
2264
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2265
                    break;
2266
                case 7: // tl
2267
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2268
                                   offsetof(CPUSPARCState, tl));
2269
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2270
                    break;
2271
                case 8: // pil
2272
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2273
                                   offsetof(CPUSPARCState, psrpil));
2274
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2275
                    break;
2276
                case 9: // cwp
2277
                    gen_helper_rdcwp(cpu_tmp0);
2278
                    break;
2279
                case 10: // cansave
2280
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2281
                                   offsetof(CPUSPARCState, cansave));
2282
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2283
                    break;
2284
                case 11: // canrestore
2285
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2286
                                   offsetof(CPUSPARCState, canrestore));
2287
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2288
                    break;
2289
                case 12: // cleanwin
2290
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2291
                                   offsetof(CPUSPARCState, cleanwin));
2292
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2293
                    break;
2294
                case 13: // otherwin
2295
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2296
                                   offsetof(CPUSPARCState, otherwin));
2297
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2298
                    break;
2299
                case 14: // wstate
2300
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2301
                                   offsetof(CPUSPARCState, wstate));
2302
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2303
                    break;
2304
                case 16: // UA2005 gl
2305
                    CHECK_IU_FEATURE(dc, GL);
2306
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2307
                                   offsetof(CPUSPARCState, gl));
2308
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2309
                    break;
2310
                case 26: // UA2005 strand status
2311
                    CHECK_IU_FEATURE(dc, HYPV);
2312
                    if (!hypervisor(dc))
2313
                        goto priv_insn;
2314
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2315
                    break;
2316
                case 31: // ver
2317
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2318
                    break;
2319
                case 15: // fq
2320
                default:
2321
                    goto illegal_insn;
2322
                }
2323
#else
2324
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2325
#endif
2326
                gen_movl_TN_reg(rd, cpu_tmp0);
2327
                break;
2328
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2329
#ifdef TARGET_SPARC64
2330
                save_state(dc, cpu_cond);
2331
                gen_helper_flushw();
2332
#else
2333
                if (!supervisor(dc))
2334
                    goto priv_insn;
2335
                gen_movl_TN_reg(rd, cpu_tbr);
2336
#endif
2337
                break;
2338
#endif
2339
            } else if (xop == 0x34) {   /* FPU Operations */
2340
                if (gen_trap_ifnofpu(dc, cpu_cond))
2341
                    goto jmp_insn;
2342
                gen_op_clear_ieee_excp_and_FTT();
2343
                rs1 = GET_FIELD(insn, 13, 17);
2344
                rs2 = GET_FIELD(insn, 27, 31);
2345
                xop = GET_FIELD(insn, 18, 26);
2346
                save_state(dc, cpu_cond);
2347
                switch (xop) {
2348
                case 0x1: /* fmovs */
2349
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2350
                    break;
2351
                case 0x5: /* fnegs */
2352
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2353
                    break;
2354
                case 0x9: /* fabss */
2355
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2356
                    break;
2357
                case 0x29: /* fsqrts */
2358
                    CHECK_FPU_FEATURE(dc, FSQRT);
2359
                    gen_clear_float_exceptions();
2360
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2361
                    gen_helper_check_ieee_exceptions();
2362
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2363
                    break;
2364
                case 0x2a: /* fsqrtd */
2365
                    CHECK_FPU_FEATURE(dc, FSQRT);
2366
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2367
                    gen_clear_float_exceptions();
2368
                    gen_helper_fsqrtd();
2369
                    gen_helper_check_ieee_exceptions();
2370
                    gen_op_store_DT0_fpr(DFPREG(rd));
2371
                    break;
2372
                case 0x2b: /* fsqrtq */
2373
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2374
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2375
                    gen_clear_float_exceptions();
2376
                    gen_helper_fsqrtq();
2377
                    gen_helper_check_ieee_exceptions();
2378
                    gen_op_store_QT0_fpr(QFPREG(rd));
2379
                    break;
2380
                case 0x41: /* fadds */
2381
                    gen_clear_float_exceptions();
2382
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2383
                    gen_helper_check_ieee_exceptions();
2384
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2385
                    break;
2386
                case 0x42: /* faddd */
2387
                    gen_op_load_fpr_DT0(DFPREG(rs1));
2388
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2389
                    gen_clear_float_exceptions();
2390
                    gen_helper_faddd();
2391
                    gen_helper_check_ieee_exceptions();
2392
                    gen_op_store_DT0_fpr(DFPREG(rd));
2393
                    break;
2394
                case 0x43: /* faddq */
2395
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2396
                    gen_op_load_fpr_QT0(QFPREG(rs1));
2397
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2398
                    gen_clear_float_exceptions();
2399
                    gen_helper_faddq();
2400
                    gen_helper_check_ieee_exceptions();
2401
                    gen_op_store_QT0_fpr(QFPREG(rd));
2402
                    break;
2403
                case 0x45: /* fsubs */
2404
                    gen_clear_float_exceptions();
2405
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2406
                    gen_helper_check_ieee_exceptions();
2407
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2408
                    break;
2409
                case 0x46: /* fsubd */
2410
                    gen_op_load_fpr_DT0(DFPREG(rs1));
2411
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2412
                    gen_clear_float_exceptions();
2413
                    gen_helper_fsubd();
2414
                    gen_helper_check_ieee_exceptions();
2415
                    gen_op_store_DT0_fpr(DFPREG(rd));
2416
                    break;
2417
                case 0x47: /* fsubq */
2418
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2419
                    gen_op_load_fpr_QT0(QFPREG(rs1));
2420
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2421
                    gen_clear_float_exceptions();
2422
                    gen_helper_fsubq();
2423
                    gen_helper_check_ieee_exceptions();
2424
                    gen_op_store_QT0_fpr(QFPREG(rd));
2425
                    break;
2426
                case 0x49: /* fmuls */
2427
                    CHECK_FPU_FEATURE(dc, FMUL);
2428
                    gen_clear_float_exceptions();
2429
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2430
                    gen_helper_check_ieee_exceptions();
2431
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2432
                    break;
2433
                case 0x4a: /* fmuld */
2434
                    CHECK_FPU_FEATURE(dc, FMUL);
2435
                    gen_op_load_fpr_DT0(DFPREG(rs1));
2436
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2437
                    gen_clear_float_exceptions();
2438
                    gen_helper_fmuld();
2439
                    gen_helper_check_ieee_exceptions();
2440
                    gen_op_store_DT0_fpr(DFPREG(rd));
2441
                    break;
2442
                case 0x4b: /* fmulq */
2443
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2444
                    CHECK_FPU_FEATURE(dc, FMUL);
2445
                    gen_op_load_fpr_QT0(QFPREG(rs1));
2446
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2447
                    gen_clear_float_exceptions();
2448
                    gen_helper_fmulq();
2449
                    gen_helper_check_ieee_exceptions();
2450
                    gen_op_store_QT0_fpr(QFPREG(rd));
2451
                    break;
2452
                case 0x4d: /* fdivs */
2453
                    gen_clear_float_exceptions();
2454
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2455
                    gen_helper_check_ieee_exceptions();
2456
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2457
                    break;
2458
                case 0x4e: /* fdivd */
2459
                    gen_op_load_fpr_DT0(DFPREG(rs1));
2460
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2461
                    gen_clear_float_exceptions();
2462
                    gen_helper_fdivd();
2463
                    gen_helper_check_ieee_exceptions();
2464
                    gen_op_store_DT0_fpr(DFPREG(rd));
2465
                    break;
2466
                case 0x4f: /* fdivq */
2467
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2468
                    gen_op_load_fpr_QT0(QFPREG(rs1));
2469
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2470
                    gen_clear_float_exceptions();
2471
                    gen_helper_fdivq();
2472
                    gen_helper_check_ieee_exceptions();
2473
                    gen_op_store_QT0_fpr(QFPREG(rd));
2474
                    break;
2475
                case 0x69: /* fsmuld */
2476
                    CHECK_FPU_FEATURE(dc, FSMULD);
2477
                    gen_clear_float_exceptions();
2478
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2479
                    gen_helper_check_ieee_exceptions();
2480
                    gen_op_store_DT0_fpr(DFPREG(rd));
2481
                    break;
2482
                case 0x6e: /* fdmulq */
2483
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2484
                    gen_op_load_fpr_DT0(DFPREG(rs1));
2485
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2486
                    gen_clear_float_exceptions();
2487
                    gen_helper_fdmulq();
2488
                    gen_helper_check_ieee_exceptions();
2489
                    gen_op_store_QT0_fpr(QFPREG(rd));
2490
                    break;
2491
                case 0xc4: /* fitos */
2492
                    gen_clear_float_exceptions();
2493
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2494
                    gen_helper_check_ieee_exceptions();
2495
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2496
                    break;
2497
                case 0xc6: /* fdtos */
2498
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2499
                    gen_clear_float_exceptions();
2500
                    gen_helper_fdtos(cpu_tmp32);
2501
                    gen_helper_check_ieee_exceptions();
2502
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2503
                    break;
2504
                case 0xc7: /* fqtos */
2505
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2506
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2507
                    gen_clear_float_exceptions();
2508
                    gen_helper_fqtos(cpu_tmp32);
2509
                    gen_helper_check_ieee_exceptions();
2510
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2511
                    break;
2512
                case 0xc8: /* fitod */
2513
                    gen_helper_fitod(cpu_fpr[rs2]);
2514
                    gen_op_store_DT0_fpr(DFPREG(rd));
2515
                    break;
2516
                case 0xc9: /* fstod */
2517
                    gen_helper_fstod(cpu_fpr[rs2]);
2518
                    gen_op_store_DT0_fpr(DFPREG(rd));
2519
                    break;
2520
                case 0xcb: /* fqtod */
2521
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2522
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2523
                    gen_clear_float_exceptions();
2524
                    gen_helper_fqtod();
2525
                    gen_helper_check_ieee_exceptions();
2526
                    gen_op_store_DT0_fpr(DFPREG(rd));
2527
                    break;
2528
                case 0xcc: /* fitoq */
2529
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2530
                    gen_helper_fitoq(cpu_fpr[rs2]);
2531
                    gen_op_store_QT0_fpr(QFPREG(rd));
2532
                    break;
2533
                case 0xcd: /* fstoq */
2534
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2535
                    gen_helper_fstoq(cpu_fpr[rs2]);
2536
                    gen_op_store_QT0_fpr(QFPREG(rd));
2537
                    break;
2538
                case 0xce: /* fdtoq */
2539
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2540
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2541
                    gen_helper_fdtoq();
2542
                    gen_op_store_QT0_fpr(QFPREG(rd));
2543
                    break;
2544
                case 0xd1: /* fstoi */
2545
                    gen_clear_float_exceptions();
2546
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2547
                    gen_helper_check_ieee_exceptions();
2548
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2549
                    break;
2550
                case 0xd2: /* fdtoi */
2551
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2552
                    gen_clear_float_exceptions();
2553
                    gen_helper_fdtoi(cpu_tmp32);
2554
                    gen_helper_check_ieee_exceptions();
2555
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2556
                    break;
2557
                case 0xd3: /* fqtoi */
2558
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2559
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2560
                    gen_clear_float_exceptions();
2561
                    gen_helper_fqtoi(cpu_tmp32);
2562
                    gen_helper_check_ieee_exceptions();
2563
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2564
                    break;
2565
#ifdef TARGET_SPARC64
2566
                case 0x2: /* V9 fmovd */
2567
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2568
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2569
                                    cpu_fpr[DFPREG(rs2) + 1]);
2570
                    break;
2571
                case 0x3: /* V9 fmovq */
2572
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2573
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2574
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2575
                                    cpu_fpr[QFPREG(rs2) + 1]);
2576
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2577
                                    cpu_fpr[QFPREG(rs2) + 2]);
2578
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2579
                                    cpu_fpr[QFPREG(rs2) + 3]);
2580
                    break;
2581
                case 0x6: /* V9 fnegd */
2582
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2583
                    gen_helper_fnegd();
2584
                    gen_op_store_DT0_fpr(DFPREG(rd));
2585
                    break;
2586
                case 0x7: /* V9 fnegq */
2587
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2588
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2589
                    gen_helper_fnegq();
2590
                    gen_op_store_QT0_fpr(QFPREG(rd));
2591
                    break;
2592
                case 0xa: /* V9 fabsd */
2593
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2594
                    gen_helper_fabsd();
2595
                    gen_op_store_DT0_fpr(DFPREG(rd));
2596
                    break;
2597
                case 0xb: /* V9 fabsq */
2598
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2599
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2600
                    gen_helper_fabsq();
2601
                    gen_op_store_QT0_fpr(QFPREG(rd));
2602
                    break;
2603
                case 0x81: /* V9 fstox */
2604
                    gen_clear_float_exceptions();
2605
                    gen_helper_fstox(cpu_fpr[rs2]);
2606
                    gen_helper_check_ieee_exceptions();
2607
                    gen_op_store_DT0_fpr(DFPREG(rd));
2608
                    break;
2609
                case 0x82: /* V9 fdtox */
2610
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2611
                    gen_clear_float_exceptions();
2612
                    gen_helper_fdtox();
2613
                    gen_helper_check_ieee_exceptions();
2614
                    gen_op_store_DT0_fpr(DFPREG(rd));
2615
                    break;
2616
                case 0x83: /* V9 fqtox */
2617
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2618
                    gen_op_load_fpr_QT1(QFPREG(rs2));
2619
                    gen_clear_float_exceptions();
2620
                    gen_helper_fqtox();
2621
                    gen_helper_check_ieee_exceptions();
2622
                    gen_op_store_DT0_fpr(DFPREG(rd));
2623
                    break;
2624
                case 0x84: /* V9 fxtos */
2625
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2626
                    gen_clear_float_exceptions();
2627
                    gen_helper_fxtos(cpu_tmp32);
2628
                    gen_helper_check_ieee_exceptions();
2629
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2630
                    break;
2631
                case 0x88: /* V9 fxtod */
2632
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2633
                    gen_clear_float_exceptions();
2634
                    gen_helper_fxtod();
2635
                    gen_helper_check_ieee_exceptions();
2636
                    gen_op_store_DT0_fpr(DFPREG(rd));
2637
                    break;
2638
                case 0x8c: /* V9 fxtoq */
2639
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2640
                    gen_op_load_fpr_DT1(DFPREG(rs2));
2641
                    gen_clear_float_exceptions();
2642
                    gen_helper_fxtoq();
2643
                    gen_helper_check_ieee_exceptions();
2644
                    gen_op_store_QT0_fpr(QFPREG(rd));
2645
                    break;
2646
#endif
2647
                default:
2648
                    goto illegal_insn;
2649
                }
2650
            } else if (xop == 0x35) {   /* FPU Operations */
2651
#ifdef TARGET_SPARC64
2652
                int cond;
2653
#endif
2654
                if (gen_trap_ifnofpu(dc, cpu_cond))
2655
                    goto jmp_insn;
2656
                gen_op_clear_ieee_excp_and_FTT();
2657
                rs1 = GET_FIELD(insn, 13, 17);
2658
                rs2 = GET_FIELD(insn, 27, 31);
2659
                xop = GET_FIELD(insn, 18, 26);
2660
                save_state(dc, cpu_cond);
2661
#ifdef TARGET_SPARC64
2662
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2663
                    int l1;
2664

    
2665
                    l1 = gen_new_label();
2666
                    cond = GET_FIELD_SP(insn, 14, 17);
2667
                    cpu_src1 = get_src1(insn, cpu_src1);
2668
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2669
                                       0, l1);
2670
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2671
                    gen_set_label(l1);
2672
                    break;
2673
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2674
                    int l1;
2675

    
2676
                    l1 = gen_new_label();
2677
                    cond = GET_FIELD_SP(insn, 14, 17);
2678
                    cpu_src1 = get_src1(insn, cpu_src1);
2679
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2680
                                       0, l1);
2681
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2682
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2683
                    gen_set_label(l1);
2684
                    break;
2685
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2686
                    int l1;
2687

    
2688
                    CHECK_FPU_FEATURE(dc, FLOAT128);
2689
                    l1 = gen_new_label();
2690
                    cond = GET_FIELD_SP(insn, 14, 17);
2691
                    cpu_src1 = get_src1(insn, cpu_src1);
2692
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2693
                                       0, l1);
2694
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2695
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2696
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2697
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2698
                    gen_set_label(l1);
2699
                    break;
2700
                }
2701
#endif
2702
                switch (xop) {
2703
#ifdef TARGET_SPARC64
2704
/* V9 fmovscc on an FP condition code: if the condition field of the insn
   holds for %fcc<fcc>, copy single-precision %f[rs2] into %f[rd];
   otherwise branch over the move.  Uses 'cond', 'insn', 'rd', 'rs2'
   from the enclosing decoder scope. */
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_flag;                                    \
                        int lbl_skip;                                   \
                                                                        \
                        lbl_skip = gen_new_label();                     \
                        r_flag = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_flag, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_flag, 0,      \
                                           lbl_skip);                   \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(lbl_skip);                        \
                        tcg_temp_free(r_flag);                          \
                    }
2719
/* V9 fmovdcc on an FP condition code: if the condition field of the insn
   holds for %fcc<fcc>, copy the double-precision register pair
   %f[rs2]/%f[rs2+1] into %f[rd]/%f[rd+1]; otherwise branch over both
   moves.  Uses 'cond', 'insn', 'rd', 'rs2' from the enclosing scope. */
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_flag;                                    \
                        int lbl_skip;                                   \
                                                                        \
                        lbl_skip = gen_new_label();                     \
                        r_flag = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_flag, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_flag, 0,      \
                                           lbl_skip);                   \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(lbl_skip);                        \
                        tcg_temp_free(r_flag);                          \
                    }
2737
/* V9 fmovqcc on an FP condition code: if the condition field of the insn
   holds for %fcc<fcc>, copy the quad-precision register group
   %f[rs2]..%f[rs2+3] into %f[rd]..%f[rd+3]; otherwise branch over all
   four moves.  Uses 'cond', 'insn', 'rd', 'rs2' from the enclosing scope. */
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_flag;                                    \
                        int lbl_skip;                                   \
                                                                        \
                        lbl_skip = gen_new_label();                     \
                        r_flag = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_flag, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_flag, 0,      \
                                           lbl_skip);                   \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(lbl_skip);                        \
                        tcg_temp_free(r_flag);                          \
                    }
2759
                    case 0x001: /* V9 fmovscc %fcc0 */
2760
                        FMOVSCC(0);
2761
                        break;
2762
                    case 0x002: /* V9 fmovdcc %fcc0 */
2763
                        FMOVDCC(0);
2764
                        break;
2765
                    case 0x003: /* V9 fmovqcc %fcc0 */
2766
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2767
                        FMOVQCC(0);
2768
                        break;
2769
                    case 0x041: /* V9 fmovscc %fcc1 */
2770
                        FMOVSCC(1);
2771
                        break;
2772
                    case 0x042: /* V9 fmovdcc %fcc1 */
2773
                        FMOVDCC(1);
2774
                        break;
2775
                    case 0x043: /* V9 fmovqcc %fcc1 */
2776
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2777
                        FMOVQCC(1);
2778
                        break;
2779
                    case 0x081: /* V9 fmovscc %fcc2 */
2780
                        FMOVSCC(2);
2781
                        break;
2782
                    case 0x082: /* V9 fmovdcc %fcc2 */
2783
                        FMOVDCC(2);
2784
                        break;
2785
                    case 0x083: /* V9 fmovqcc %fcc2 */
2786
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2787
                        FMOVQCC(2);
2788
                        break;
2789
                    case 0x0c1: /* V9 fmovscc %fcc3 */
2790
                        FMOVSCC(3);
2791
                        break;
2792
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
2793
                        FMOVDCC(3);
2794
                        break;
2795
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
2796
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2797
                        FMOVQCC(3);
2798
                        break;
2799
#undef FMOVSCC
2800
#undef FMOVDCC
2801
#undef FMOVQCC
2802
/* V9 fmovscc on an integer condition code: if the condition field of the
   insn holds for %icc (icc == 0) or %xcc (icc == 1), copy single-precision
   %f[rs2] into %f[rd]; otherwise branch over the move.  Uses 'cond',
   'insn', 'rd', 'rs2', 'dc' from the enclosing decoder scope. */
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_flag;                                    \
                        int lbl_skip;                                   \
                                                                        \
                        lbl_skip = gen_new_label();                     \
                        r_flag = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_flag, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_flag, 0,      \
                                           lbl_skip);                   \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(lbl_skip);                        \
                        tcg_temp_free(r_flag);                          \
                    }
2817
/* V9 fmovdcc on an integer condition code: if the condition field of the
   insn holds for %icc (icc == 0) or %xcc (icc == 1), copy the
   double-precision pair %f[rs2]/%f[rs2+1] into %f[rd]/%f[rd+1];
   otherwise branch over both moves. */
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_flag;                                    \
                        int lbl_skip;                                   \
                                                                        \
                        lbl_skip = gen_new_label();                     \
                        r_flag = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_flag, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_flag, 0,      \
                                           lbl_skip);                   \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(lbl_skip);                        \
                        tcg_temp_free(r_flag);                          \
                    }
2835
/* V9 fmovqcc on an integer condition code: if the condition field of the
   insn holds for %icc (icc == 0) or %xcc (icc == 1), copy the
   quad-precision group %f[rs2]..%f[rs2+3] into %f[rd]..%f[rd+3];
   otherwise branch over all four moves. */
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_flag;                                    \
                        int lbl_skip;                                   \
                                                                        \
                        lbl_skip = gen_new_label();                     \
                        r_flag = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_flag, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_flag, 0,      \
                                           lbl_skip);                   \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(lbl_skip);                        \
                        tcg_temp_free(r_flag);                          \
                    }
2857

    
2858
                    case 0x101: /* V9 fmovscc %icc */
2859
                        FMOVSCC(0);
2860
                        break;
2861
                    case 0x102: /* V9 fmovdcc %icc */
2862
                        FMOVDCC(0);
2863
                    case 0x103: /* V9 fmovqcc %icc */
2864
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2865
                        FMOVQCC(0);
2866
                        break;
2867
                    case 0x181: /* V9 fmovscc %xcc */
2868
                        FMOVSCC(1);
2869
                        break;
2870
                    case 0x182: /* V9 fmovdcc %xcc */
2871
                        FMOVDCC(1);
2872
                        break;
2873
                    case 0x183: /* V9 fmovqcc %xcc */
2874
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2875
                        FMOVQCC(1);
2876
                        break;
2877
#undef FMOVSCC
2878
#undef FMOVDCC
2879
#undef FMOVQCC
2880
#endif
2881
                    case 0x51: /* fcmps, V9 %fcc */
2882
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2883
                        break;
2884
                    case 0x52: /* fcmpd, V9 %fcc */
2885
                        gen_op_load_fpr_DT0(DFPREG(rs1));
2886
                        gen_op_load_fpr_DT1(DFPREG(rs2));
2887
                        gen_op_fcmpd(rd & 3);
2888
                        break;
2889
                    case 0x53: /* fcmpq, V9 %fcc */
2890
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2891
                        gen_op_load_fpr_QT0(QFPREG(rs1));
2892
                        gen_op_load_fpr_QT1(QFPREG(rs2));
2893
                        gen_op_fcmpq(rd & 3);
2894
                        break;
2895
                    case 0x55: /* fcmpes, V9 %fcc */
2896
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2897
                        break;
2898
                    case 0x56: /* fcmped, V9 %fcc */
2899
                        gen_op_load_fpr_DT0(DFPREG(rs1));
2900
                        gen_op_load_fpr_DT1(DFPREG(rs2));
2901
                        gen_op_fcmped(rd & 3);
2902
                        break;
2903
                    case 0x57: /* fcmpeq, V9 %fcc */
2904
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2905
                        gen_op_load_fpr_QT0(QFPREG(rs1));
2906
                        gen_op_load_fpr_QT1(QFPREG(rs2));
2907
                        gen_op_fcmpeq(rd & 3);
2908
                        break;
2909
                    default:
2910
                        goto illegal_insn;
2911
                }
2912
            } else if (xop == 0x2) {
2913
                // clr/mov shortcut
2914

    
2915
                rs1 = GET_FIELD(insn, 13, 17);
2916
                if (rs1 == 0) {
2917
                    // or %g0, x, y -> mov T0, x; mov y, T0
2918
                    if (IS_IMM) {       /* immediate */
2919
                        TCGv r_const;
2920

    
2921
                        simm = GET_FIELDs(insn, 19, 31);
2922
                        r_const = tcg_const_tl(simm);
2923
                        gen_movl_TN_reg(rd, r_const);
2924
                        tcg_temp_free(r_const);
2925
                    } else {            /* register */
2926
                        rs2 = GET_FIELD(insn, 27, 31);
2927
                        gen_movl_reg_TN(rs2, cpu_dst);
2928
                        gen_movl_TN_reg(rd, cpu_dst);
2929
                    }
2930
                } else {
2931
                    cpu_src1 = get_src1(insn, cpu_src1);
2932
                    if (IS_IMM) {       /* immediate */
2933
                        simm = GET_FIELDs(insn, 19, 31);
2934
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2935
                        gen_movl_TN_reg(rd, cpu_dst);
2936
                    } else {            /* register */
2937
                        // or x, %g0, y -> mov T1, x; mov y, T1
2938
                        rs2 = GET_FIELD(insn, 27, 31);
2939
                        if (rs2 != 0) {
2940
                            gen_movl_reg_TN(rs2, cpu_src2);
2941
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2942
                            gen_movl_TN_reg(rd, cpu_dst);
2943
                        } else
2944
                            gen_movl_TN_reg(rd, cpu_src1);
2945
                    }
2946
                }
2947
#ifdef TARGET_SPARC64
2948
            } else if (xop == 0x25) { /* sll, V9 sllx */
2949
                cpu_src1 = get_src1(insn, cpu_src1);
2950
                if (IS_IMM) {   /* immediate */
2951
                    simm = GET_FIELDs(insn, 20, 31);
2952
                    if (insn & (1 << 12)) {
2953
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2954
                    } else {
2955
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2956
                    }
2957
                } else {                /* register */
2958
                    rs2 = GET_FIELD(insn, 27, 31);
2959
                    gen_movl_reg_TN(rs2, cpu_src2);
2960
                    if (insn & (1 << 12)) {
2961
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2962
                    } else {
2963
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2964
                    }
2965
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2966
                }
2967
                gen_movl_TN_reg(rd, cpu_dst);
2968
            } else if (xop == 0x26) { /* srl, V9 srlx */
2969
                cpu_src1 = get_src1(insn, cpu_src1);
2970
                if (IS_IMM) {   /* immediate */
2971
                    simm = GET_FIELDs(insn, 20, 31);
2972
                    if (insn & (1 << 12)) {
2973
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2974
                    } else {
2975
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2976
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2977
                    }
2978
                } else {                /* register */
2979
                    rs2 = GET_FIELD(insn, 27, 31);
2980
                    gen_movl_reg_TN(rs2, cpu_src2);
2981
                    if (insn & (1 << 12)) {
2982
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2983
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2984
                    } else {
2985
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2986
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2987
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2988
                    }
2989
                }
2990
                gen_movl_TN_reg(rd, cpu_dst);
2991
            } else if (xop == 0x27) { /* sra, V9 srax */
2992
                cpu_src1 = get_src1(insn, cpu_src1);
2993
                if (IS_IMM) {   /* immediate */
2994
                    simm = GET_FIELDs(insn, 20, 31);
2995
                    if (insn & (1 << 12)) {
2996
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2997
                    } else {
2998
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2999
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3000
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3001
                    }
3002
                } else {                /* register */
3003
                    rs2 = GET_FIELD(insn, 27, 31);
3004
                    gen_movl_reg_TN(rs2, cpu_src2);
3005
                    if (insn & (1 << 12)) {
3006
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3007
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3008
                    } else {
3009
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3010
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3011
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3012
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3013
                    }
3014
                }
3015
                gen_movl_TN_reg(rd, cpu_dst);
3016
#endif
3017
            } else if (xop < 0x36) {
3018
                if (xop < 0x20) {
3019
                    cpu_src1 = get_src1(insn, cpu_src1);
3020
                    cpu_src2 = get_src2(insn, cpu_src2);
3021
                    switch (xop & ~0x10) {
3022
                    case 0x0: /* add */
3023
                        if (IS_IMM) {
3024
                            simm = GET_FIELDs(insn, 19, 31);
3025
                            if (xop & 0x10) {
3026
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3027
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3028
                                dc->cc_op = CC_OP_ADD;
3029
                            } else {
3030
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3031
                            }
3032
                        } else {
3033
                            if (xop & 0x10) {
3034
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3035
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3036
                                dc->cc_op = CC_OP_ADD;
3037
                            } else {
3038
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3039
                            }
3040
                        }
3041
                        break;
3042
                    case 0x1: /* and */
3043
                        if (IS_IMM) {
3044
                            simm = GET_FIELDs(insn, 19, 31);
3045
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3046
                        } else {
3047
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3048
                        }
3049
                        if (xop & 0x10) {
3050
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3051
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3052
                            dc->cc_op = CC_OP_LOGIC;
3053
                        }
3054
                        break;
3055
                    case 0x2: /* or */
3056
                        if (IS_IMM) {
3057
                            simm = GET_FIELDs(insn, 19, 31);
3058
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3059
                        } else {
3060
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3061
                        }
3062
                        if (xop & 0x10) {
3063
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3064
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3065
                            dc->cc_op = CC_OP_LOGIC;
3066
                        }
3067
                        break;
3068
                    case 0x3: /* xor */
3069
                        if (IS_IMM) {
3070
                            simm = GET_FIELDs(insn, 19, 31);
3071
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3072
                        } else {
3073
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3074
                        }
3075
                        if (xop & 0x10) {
3076
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3077
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3078
                            dc->cc_op = CC_OP_LOGIC;
3079
                        }
3080
                        break;
3081
                    case 0x4: /* sub */
3082
                        if (IS_IMM) {
3083
                            simm = GET_FIELDs(insn, 19, 31);
3084
                            if (xop & 0x10) {
3085
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3086
                            } else {
3087
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3088
                            }
3089
                        } else {
3090
                            if (xop & 0x10) {
3091
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3092
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3093
                                dc->cc_op = CC_OP_SUB;
3094
                            } else {
3095
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3096
                            }
3097
                        }
3098
                        break;
3099
                    case 0x5: /* andn */
3100
                        if (IS_IMM) {
3101
                            simm = GET_FIELDs(insn, 19, 31);
3102
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3103
                        } else {
3104
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3105
                        }
3106
                        if (xop & 0x10) {
3107
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3108
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3109
                            dc->cc_op = CC_OP_LOGIC;
3110
                        }
3111
                        break;
3112
                    case 0x6: /* orn */
3113
                        if (IS_IMM) {
3114
                            simm = GET_FIELDs(insn, 19, 31);
3115
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3116
                        } else {
3117
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3118
                        }
3119
                        if (xop & 0x10) {
3120
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3121
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3122
                            dc->cc_op = CC_OP_LOGIC;
3123
                        }
3124
                        break;
3125
                    case 0x7: /* xorn */
3126
                        if (IS_IMM) {
3127
                            simm = GET_FIELDs(insn, 19, 31);
3128
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3129
                        } else {
3130
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3131
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3132
                        }
3133
                        if (xop & 0x10) {
3134
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3135
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3136
                            dc->cc_op = CC_OP_LOGIC;
3137
                        }
3138
                        break;
3139
                    case 0x8: /* addx, V9 addc */
3140
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3141
                                        (xop & 0x10));
3142
                        break;
3143
#ifdef TARGET_SPARC64
3144
                    case 0x9: /* V9 mulx */
3145
                        if (IS_IMM) {
3146
                            simm = GET_FIELDs(insn, 19, 31);
3147
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3148
                        } else {
3149
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3150
                        }
3151
                        break;
3152
#endif
3153
                    case 0xa: /* umul */
3154
                        CHECK_IU_FEATURE(dc, MUL);
3155
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3156
                        if (xop & 0x10) {
3157
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3158
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3159
                            dc->cc_op = CC_OP_LOGIC;
3160
                        }
3161
                        break;
3162
                    case 0xb: /* smul */
3163
                        CHECK_IU_FEATURE(dc, MUL);
3164
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3165
                        if (xop & 0x10) {
3166
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3167
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3168
                            dc->cc_op = CC_OP_LOGIC;
3169
                        }
3170
                        break;
3171
                    case 0xc: /* subx, V9 subc */
3172
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3173
                                        (xop & 0x10));
3174
                        break;
3175
#ifdef TARGET_SPARC64
3176
                    case 0xd: /* V9 udivx */
3177
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3178
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3179
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
3180
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3181
                        break;
3182
#endif
3183
                    case 0xe: /* udiv */
3184
                        CHECK_IU_FEATURE(dc, DIV);
3185
                        if (xop & 0x10) {
3186
                            gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3187
                            dc->cc_op = CC_OP_DIV;
3188
                        } else {
3189
                            gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3190
                        }
3191
                        break;
3192
                    case 0xf: /* sdiv */
3193
                        CHECK_IU_FEATURE(dc, DIV);
3194
                        if (xop & 0x10) {
3195
                            gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3196
                            dc->cc_op = CC_OP_DIV;
3197
                        } else {
3198
                            gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3199
                        }
3200
                        break;
3201
                    default:
3202
                        goto illegal_insn;
3203
                    }
3204
                    gen_movl_TN_reg(rd, cpu_dst);
3205
                } else {
3206
                    cpu_src1 = get_src1(insn, cpu_src1);
3207
                    cpu_src2 = get_src2(insn, cpu_src2);
3208
                    switch (xop) {
3209
                    case 0x20: /* taddcc */
3210
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3211
                        gen_movl_TN_reg(rd, cpu_dst);
3212
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3213
                        dc->cc_op = CC_OP_TADD;
3214
                        break;
3215
                    case 0x21: /* tsubcc */
3216
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3217
                        gen_movl_TN_reg(rd, cpu_dst);
3218
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3219
                        dc->cc_op = CC_OP_TSUB;
3220
                        break;
3221
                    case 0x22: /* taddcctv */
3222
                        save_state(dc, cpu_cond);
3223
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3224
                        gen_movl_TN_reg(rd, cpu_dst);
3225
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3226
                        dc->cc_op = CC_OP_TADDTV;
3227
                        break;
3228
                    case 0x23: /* tsubcctv */
3229
                        save_state(dc, cpu_cond);
3230
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3231
                        gen_movl_TN_reg(rd, cpu_dst);
3232
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3233
                        dc->cc_op = CC_OP_TSUBTV;
3234
                        break;
3235
                    case 0x24: /* mulscc */
3236
                        gen_helper_compute_psr();
3237
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3238
                        gen_movl_TN_reg(rd, cpu_dst);
3239
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3240
                        dc->cc_op = CC_OP_ADD;
3241
                        break;
3242
#ifndef TARGET_SPARC64
3243
                    case 0x25:  /* sll */
3244
                        if (IS_IMM) { /* immediate */
3245
                            simm = GET_FIELDs(insn, 20, 31);
3246
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3247
                        } else { /* register */
3248
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3249
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3250
                        }
3251
                        gen_movl_TN_reg(rd, cpu_dst);
3252
                        break;
3253
                    case 0x26:  /* srl */
3254
                        if (IS_IMM) { /* immediate */
3255
                            simm = GET_FIELDs(insn, 20, 31);
3256
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3257
                        } else { /* register */
3258
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3259
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3260
                        }
3261
                        gen_movl_TN_reg(rd, cpu_dst);
3262
                        break;
3263
                    case 0x27:  /* sra */
3264
                        if (IS_IMM) { /* immediate */
3265
                            simm = GET_FIELDs(insn, 20, 31);
3266
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3267
                        } else { /* register */
3268
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3269
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3270
                        }
3271
                        gen_movl_TN_reg(rd, cpu_dst);
3272
                        break;
3273
#endif
3274
                    case 0x30:
3275
                        {
3276
                            switch(rd) {
3277
                            case 0: /* wry */
3278
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3279
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3280
                                break;
3281
#ifndef TARGET_SPARC64
3282
                            case 0x01 ... 0x0f: /* undefined in the
3283
                                                   SPARCv8 manual, nop
3284
                                                   on the microSPARC
3285
                                                   II */
3286
                            case 0x10 ... 0x1f: /* implementation-dependent
3287
                                                   in the SPARCv8
3288
                                                   manual, nop on the
3289
                                                   microSPARC II */
3290
                                break;
3291
#else
3292
                            case 0x2: /* V9 wrccr */
3293
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3294
                                gen_helper_wrccr(cpu_dst);
3295
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3296
                                dc->cc_op = CC_OP_FLAGS;
3297
                                break;
3298
                            case 0x3: /* V9 wrasi */
3299
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3300
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3301
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3302
                                break;
3303
                            case 0x6: /* V9 wrfprs */
3304
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3305
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3306
                                save_state(dc, cpu_cond);
3307
                                gen_op_next_insn();
3308
                                tcg_gen_exit_tb(0);
3309
                                dc->is_br = 1;
3310
                                break;
3311
                            case 0xf: /* V9 sir, nop if user */
3312
#if !defined(CONFIG_USER_ONLY)
3313
                                if (supervisor(dc)) {
3314
                                    ; // XXX
3315
                                }
3316
#endif
3317
                                break;
3318
                            case 0x13: /* Graphics Status */
3319
                                if (gen_trap_ifnofpu(dc, cpu_cond))
3320
                                    goto jmp_insn;
3321
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3322
                                break;
3323
                            case 0x14: /* Softint set */
3324
                                if (!supervisor(dc))
3325
                                    goto illegal_insn;
3326
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3327
                                gen_helper_set_softint(cpu_tmp64);
3328
                                break;
3329
                            case 0x15: /* Softint clear */
3330
                                if (!supervisor(dc))
3331
                                    goto illegal_insn;
3332
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3333
                                gen_helper_clear_softint(cpu_tmp64);
3334
                                break;
3335
                            case 0x16: /* Softint write */
3336
                                if (!supervisor(dc))
3337
                                    goto illegal_insn;
3338
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3339
                                gen_helper_write_softint(cpu_tmp64);
3340
                                break;
3341
                            case 0x17: /* Tick compare */
3342
#if !defined(CONFIG_USER_ONLY)
3343
                                if (!supervisor(dc))
3344
                                    goto illegal_insn;
3345
#endif
3346
                                {
3347
                                    TCGv_ptr r_tickptr;
3348

    
3349
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3350
                                                   cpu_src2);
3351
                                    r_tickptr = tcg_temp_new_ptr();
3352
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3353
                                                   offsetof(CPUState, tick));
3354
                                    gen_helper_tick_set_limit(r_tickptr,
3355
                                                              cpu_tick_cmpr);
3356
                                    tcg_temp_free_ptr(r_tickptr);
3357
                                }
3358
                                break;
3359
                            case 0x18: /* System tick */
3360
#if !defined(CONFIG_USER_ONLY)
3361
                                if (!supervisor(dc))
3362
                                    goto illegal_insn;
3363
#endif
3364
                                {
3365
                                    TCGv_ptr r_tickptr;
3366

    
3367
                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
3368
                                                   cpu_src2);
3369
                                    r_tickptr = tcg_temp_new_ptr();
3370
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3371
                                                   offsetof(CPUState, stick));
3372
                                    gen_helper_tick_set_count(r_tickptr,
3373
                                                              cpu_dst);
3374
                                    tcg_temp_free_ptr(r_tickptr);
3375
                                }
3376
                                break;
3377
                            case 0x19: /* System tick compare */
3378
#if !defined(CONFIG_USER_ONLY)
3379
                                if (!supervisor(dc))
3380
                                    goto illegal_insn;
3381
#endif
3382
                                {
3383
                                    TCGv_ptr r_tickptr;
3384

    
3385
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3386
                                                   cpu_src2);
3387
                                    r_tickptr = tcg_temp_new_ptr();
3388
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3389
                                                   offsetof(CPUState, stick));
3390
                                    gen_helper_tick_set_limit(r_tickptr,
3391
                                                              cpu_stick_cmpr);
3392
                                    tcg_temp_free_ptr(r_tickptr);
3393
                                }
3394
                                break;
3395

    
3396
                            case 0x10: /* Performance Control */
3397
                            case 0x11: /* Performance Instrumentation
3398
                                          Counter */
3399
                            case 0x12: /* Dispatch Control */
3400
#endif
3401
                            default:
3402
                                goto illegal_insn;
3403
                            }
3404
                        }
3405
                        break;
3406
#if !defined(CONFIG_USER_ONLY)
3407
                    case 0x31: /* wrpsr, V9 saved, restored */
3408
                        {
3409
                            if (!supervisor(dc))
3410
                                goto priv_insn;
3411
#ifdef TARGET_SPARC64
3412
                            switch (rd) {
3413
                            case 0:
3414
                                gen_helper_saved();
3415
                                break;
3416
                            case 1:
3417
                                gen_helper_restored();
3418
                                break;
3419
                            case 2: /* UA2005 allclean */
3420
                            case 3: /* UA2005 otherw */
3421
                            case 4: /* UA2005 normalw */
3422
                            case 5: /* UA2005 invalw */
3423
                                // XXX
3424
                            default:
3425
                                goto illegal_insn;
3426
                            }
3427
#else
3428
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3429
                            gen_helper_wrpsr(cpu_dst);
3430
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3431
                            dc->cc_op = CC_OP_FLAGS;
3432
                            save_state(dc, cpu_cond);
3433
                            gen_op_next_insn();
3434
                            tcg_gen_exit_tb(0);
3435
                            dc->is_br = 1;
3436
#endif
3437
                        }
3438
                        break;
3439
                    case 0x32: /* wrwim, V9 wrpr */
3440
                        {
3441
                            if (!supervisor(dc))
3442
                                goto priv_insn;
3443
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3444
#ifdef TARGET_SPARC64
3445
                            switch (rd) {
3446
                            case 0: // tpc
3447
                                {
3448
                                    TCGv_ptr r_tsptr;
3449

    
3450
                                    r_tsptr = tcg_temp_new_ptr();
3451
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3452
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3453
                                                  offsetof(trap_state, tpc));
3454
                                    tcg_temp_free_ptr(r_tsptr);
3455
                                }
3456
                                break;
3457
                            case 1: // tnpc
3458
                                {
3459
                                    TCGv_ptr r_tsptr;
3460

    
3461
                                    r_tsptr = tcg_temp_new_ptr();
3462
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3463
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3464
                                                  offsetof(trap_state, tnpc));
3465
                                    tcg_temp_free_ptr(r_tsptr);
3466
                                }
3467
                                break;
3468
                            case 2: // tstate
3469
                                {
3470
                                    TCGv_ptr r_tsptr;
3471

    
3472
                                    r_tsptr = tcg_temp_new_ptr();
3473
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3474
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3475
                                                  offsetof(trap_state,
3476
                                                           tstate));
3477
                                    tcg_temp_free_ptr(r_tsptr);
3478
                                }
3479
                                break;
3480
                            case 3: // tt
3481
                                {
3482
                                    TCGv_ptr r_tsptr;
3483

    
3484
                                    r_tsptr = tcg_temp_new_ptr();
3485
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3486
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3487
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3488
                                                   offsetof(trap_state, tt));
3489
                                    tcg_temp_free_ptr(r_tsptr);
3490
                                }
3491
                                break;
3492
                            case 4: // tick
3493
                                {
3494
                                    TCGv_ptr r_tickptr;
3495

    
3496
                                    r_tickptr = tcg_temp_new_ptr();
3497
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3498
                                                   offsetof(CPUState, tick));
3499
                                    gen_helper_tick_set_count(r_tickptr,
3500
                                                              cpu_tmp0);
3501
                                    tcg_temp_free_ptr(r_tickptr);
3502
                                }
3503
                                break;
3504
                            case 5: // tba
3505
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3506
                                break;
3507
                            case 6: // pstate
3508
                                {
3509
                                    TCGv r_tmp = tcg_temp_local_new();
3510

    
3511
                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3512
                                    save_state(dc, cpu_cond);
3513
                                    gen_helper_wrpstate(r_tmp);
3514
                                    tcg_temp_free(r_tmp);
3515
                                    dc->npc = DYNAMIC_PC;
3516
                                }
3517
                                break;
3518
                            case 7: // tl
3519
                                {
3520
                                    TCGv r_tmp = tcg_temp_local_new();
3521

    
3522
                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3523
                                    save_state(dc, cpu_cond);
3524
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3525
                                    tcg_temp_free(r_tmp);
3526
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
3527
                                                   offsetof(CPUSPARCState, tl));
3528
                                    dc->npc = DYNAMIC_PC;
3529
                                }
3530
                                break;
3531
                            case 8: // pil
3532
                                gen_helper_wrpil(cpu_tmp0);
3533
                                break;
3534
                            case 9: // cwp
3535
                                gen_helper_wrcwp(cpu_tmp0);
3536
                                break;
3537
                            case 10: // cansave
3538
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3539
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3540
                                               offsetof(CPUSPARCState,
3541
                                                        cansave));
3542
                                break;
3543
                            case 11: // canrestore
3544
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3545
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3546
                                               offsetof(CPUSPARCState,
3547
                                                        canrestore));
3548
                                break;
3549
                            case 12: // cleanwin
3550
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3551
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3552
                                               offsetof(CPUSPARCState,
3553
                                                        cleanwin));
3554
                                break;
3555
                            case 13: // otherwin
3556
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3557
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3558
                                               offsetof(CPUSPARCState,
3559
                                                        otherwin));
3560
                                break;
3561
                            case 14: // wstate
3562
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3563
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3564
                                               offsetof(CPUSPARCState,
3565
                                                        wstate));
3566
                                break;
3567
                            case 16: // UA2005 gl
3568
                                CHECK_IU_FEATURE(dc, GL);
3569
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3570
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3571
                                               offsetof(CPUSPARCState, gl));
3572
                                break;
3573
                            case 26: // UA2005 strand status
3574
                                CHECK_IU_FEATURE(dc, HYPV);
3575
                                if (!hypervisor(dc))
3576
                                    goto priv_insn;
3577
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3578
                                break;
3579
                            default:
3580
                                goto illegal_insn;
3581
                            }
3582
#else
3583
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3584
                            if (dc->def->nwindows != 32)
3585
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3586
                                                (1 << dc->def->nwindows) - 1);
3587
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3588
#endif
3589
                        }
3590
                        break;
3591
                    case 0x33: /* wrtbr, UA2005 wrhpr */
3592
                        {
3593
#ifndef TARGET_SPARC64
3594
                            if (!supervisor(dc))
3595
                                goto priv_insn;
3596
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3597
#else
3598
                            CHECK_IU_FEATURE(dc, HYPV);
3599
                            if (!hypervisor(dc))
3600
                                goto priv_insn;
3601
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3602
                            switch (rd) {
3603
                            case 0: // hpstate
3604
                                // XXX gen_op_wrhpstate();
3605
                                save_state(dc, cpu_cond);
3606
                                gen_op_next_insn();
3607
                                tcg_gen_exit_tb(0);
3608
                                dc->is_br = 1;
3609
                                break;
3610
                            case 1: // htstate
3611
                                // XXX gen_op_wrhtstate();
3612
                                break;
3613
                            case 3: // hintp
3614
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3615
                                break;
3616
                            case 5: // htba
3617
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3618
                                break;
3619
                            case 31: // hstick_cmpr
3620
                                {
3621
                                    TCGv_ptr r_tickptr;
3622

    
3623
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3624
                                    r_tickptr = tcg_temp_new_ptr();
3625
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3626
                                                   offsetof(CPUState, hstick));
3627
                                    gen_helper_tick_set_limit(r_tickptr,
3628
                                                              cpu_hstick_cmpr);
3629
                                    tcg_temp_free_ptr(r_tickptr);
3630
                                }
3631
                                break;
3632
                            case 6: // hver readonly
3633
                            default:
3634
                                goto illegal_insn;
3635
                            }
3636
#endif
3637
                        }
3638
                        break;
3639
#endif
3640
#ifdef TARGET_SPARC64
3641
                    case 0x2c: /* V9 movcc */
3642
                        {
3643
                            int cc = GET_FIELD_SP(insn, 11, 12);
3644
                            int cond = GET_FIELD_SP(insn, 14, 17);
3645
                            TCGv r_cond;
3646
                            int l1;
3647

    
3648
                            r_cond = tcg_temp_new();
3649
                            if (insn & (1 << 18)) {
3650
                                if (cc == 0)
3651
                                    gen_cond(r_cond, 0, cond, dc);
3652
                                else if (cc == 2)
3653
                                    gen_cond(r_cond, 1, cond, dc);
3654
                                else
3655
                                    goto illegal_insn;
3656
                            } else {
3657
                                gen_fcond(r_cond, cc, cond);
3658
                            }
3659

    
3660
                            l1 = gen_new_label();
3661

    
3662
                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3663
                            if (IS_IMM) {       /* immediate */
3664
                                TCGv r_const;
3665

    
3666
                                simm = GET_FIELD_SPs(insn, 0, 10);
3667
                                r_const = tcg_const_tl(simm);
3668
                                gen_movl_TN_reg(rd, r_const);
3669
                                tcg_temp_free(r_const);
3670
                            } else {
3671
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3672
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3673
                                gen_movl_TN_reg(rd, cpu_tmp0);
3674
                            }
3675
                            gen_set_label(l1);
3676
                            tcg_temp_free(r_cond);
3677
                            break;
3678
                        }
3679
                    case 0x2d: /* V9 sdivx */
3680
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3681
                        gen_movl_TN_reg(rd, cpu_dst);
3682
                        break;
3683
                    case 0x2e: /* V9 popc */
3684
                        {
3685
                            cpu_src2 = get_src2(insn, cpu_src2);
3686
                            gen_helper_popc(cpu_dst, cpu_src2);
3687
                            gen_movl_TN_reg(rd, cpu_dst);
3688
                        }
3689
                    case 0x2f: /* V9 movr */
3690
                        {
3691
                            int cond = GET_FIELD_SP(insn, 10, 12);
3692
                            int l1;
3693

    
3694
                            cpu_src1 = get_src1(insn, cpu_src1);
3695

    
3696
                            l1 = gen_new_label();
3697

    
3698
                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3699
                                              cpu_src1, 0, l1);
3700
                            if (IS_IMM) {       /* immediate */
3701
                                TCGv r_const;
3702

    
3703
                                simm = GET_FIELD_SPs(insn, 0, 9);
3704
                                r_const = tcg_const_tl(simm);
3705
                                gen_movl_TN_reg(rd, r_const);
3706
                                tcg_temp_free(r_const);
3707
                            } else {
3708
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3709
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3710
                                gen_movl_TN_reg(rd, cpu_tmp0);
3711
                            }
3712
                            gen_set_label(l1);
3713
                            break;
3714
                        }
3715
#endif
3716
                    default:
3717
                        goto illegal_insn;
3718
                    }
3719
                }
3720
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3721
#ifdef TARGET_SPARC64
3722
                int opf = GET_FIELD_SP(insn, 5, 13);
3723
                rs1 = GET_FIELD(insn, 13, 17);
3724
                rs2 = GET_FIELD(insn, 27, 31);
3725
                if (gen_trap_ifnofpu(dc, cpu_cond))
3726
                    goto jmp_insn;
3727

    
3728
                switch (opf) {
3729
                case 0x000: /* VIS I edge8cc */
3730
                case 0x001: /* VIS II edge8n */
3731
                case 0x002: /* VIS I edge8lcc */
3732
                case 0x003: /* VIS II edge8ln */
3733
                case 0x004: /* VIS I edge16cc */
3734
                case 0x005: /* VIS II edge16n */
3735
                case 0x006: /* VIS I edge16lcc */
3736
                case 0x007: /* VIS II edge16ln */
3737
                case 0x008: /* VIS I edge32cc */
3738
                case 0x009: /* VIS II edge32n */
3739
                case 0x00a: /* VIS I edge32lcc */
3740
                case 0x00b: /* VIS II edge32ln */
3741
                    // XXX
3742
                    goto illegal_insn;
3743
                case 0x010: /* VIS I array8 */
3744
                    CHECK_FPU_FEATURE(dc, VIS1);
3745
                    cpu_src1 = get_src1(insn, cpu_src1);
3746
                    gen_movl_reg_TN(rs2, cpu_src2);
3747
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3748
                    gen_movl_TN_reg(rd, cpu_dst);
3749
                    break;
3750
                case 0x012: /* VIS I array16 */
3751
                    CHECK_FPU_FEATURE(dc, VIS1);
3752
                    cpu_src1 = get_src1(insn, cpu_src1);
3753
                    gen_movl_reg_TN(rs2, cpu_src2);
3754
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3755
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3756
                    gen_movl_TN_reg(rd, cpu_dst);
3757
                    break;
3758
                case 0x014: /* VIS I array32 */
3759
                    CHECK_FPU_FEATURE(dc, VIS1);
3760
                    cpu_src1 = get_src1(insn, cpu_src1);
3761
                    gen_movl_reg_TN(rs2, cpu_src2);
3762
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3763
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3764
                    gen_movl_TN_reg(rd, cpu_dst);
3765
                    break;
3766
                case 0x018: /* VIS I alignaddr */
3767
                    CHECK_FPU_FEATURE(dc, VIS1);
3768
                    cpu_src1 = get_src1(insn, cpu_src1);
3769
                    gen_movl_reg_TN(rs2, cpu_src2);
3770
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3771
                    gen_movl_TN_reg(rd, cpu_dst);
3772
                    break;
3773
                case 0x019: /* VIS II bmask */
3774
                case 0x01a: /* VIS I alignaddrl */
3775
                    // XXX
3776
                    goto illegal_insn;
3777
                case 0x020: /* VIS I fcmple16 */
3778
                    CHECK_FPU_FEATURE(dc, VIS1);
3779
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3780
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3781
                    gen_helper_fcmple16();
3782
                    gen_op_store_DT0_fpr(DFPREG(rd));
3783
                    break;
3784
                case 0x022: /* VIS I fcmpne16 */
3785
                    CHECK_FPU_FEATURE(dc, VIS1);
3786
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3787
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3788
                    gen_helper_fcmpne16();
3789
                    gen_op_store_DT0_fpr(DFPREG(rd));
3790
                    break;
3791
                case 0x024: /* VIS I fcmple32 */
3792
                    CHECK_FPU_FEATURE(dc, VIS1);
3793
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3794
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3795
                    gen_helper_fcmple32();
3796
                    gen_op_store_DT0_fpr(DFPREG(rd));
3797
                    break;
3798
                case 0x026: /* VIS I fcmpne32 */
3799
                    CHECK_FPU_FEATURE(dc, VIS1);
3800
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3801
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3802
                    gen_helper_fcmpne32();
3803
                    gen_op_store_DT0_fpr(DFPREG(rd));
3804
                    break;
3805
                case 0x028: /* VIS I fcmpgt16 */
3806
                    CHECK_FPU_FEATURE(dc, VIS1);
3807
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3808
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3809
                    gen_helper_fcmpgt16();
3810
                    gen_op_store_DT0_fpr(DFPREG(rd));
3811
                    break;
3812
                case 0x02a: /* VIS I fcmpeq16 */
3813
                    CHECK_FPU_FEATURE(dc, VIS1);
3814
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3815
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3816
                    gen_helper_fcmpeq16();
3817
                    gen_op_store_DT0_fpr(DFPREG(rd));
3818
                    break;
3819
                case 0x02c: /* VIS I fcmpgt32 */
3820
                    CHECK_FPU_FEATURE(dc, VIS1);
3821
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3822
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3823
                    gen_helper_fcmpgt32();
3824
                    gen_op_store_DT0_fpr(DFPREG(rd));
3825
                    break;
3826
                case 0x02e: /* VIS I fcmpeq32 */
3827
                    CHECK_FPU_FEATURE(dc, VIS1);
3828
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3829
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3830
                    gen_helper_fcmpeq32();
3831
                    gen_op_store_DT0_fpr(DFPREG(rd));
3832
                    break;
3833
                case 0x031: /* VIS I fmul8x16 */
3834
                    CHECK_FPU_FEATURE(dc, VIS1);
3835
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3836
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3837
                    gen_helper_fmul8x16();
3838
                    gen_op_store_DT0_fpr(DFPREG(rd));
3839
                    break;
3840
                case 0x033: /* VIS I fmul8x16au */
3841
                    CHECK_FPU_FEATURE(dc, VIS1);
3842
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3843
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3844
                    gen_helper_fmul8x16au();
3845
                    gen_op_store_DT0_fpr(DFPREG(rd));
3846
                    break;
3847
                case 0x035: /* VIS I fmul8x16al */
3848
                    CHECK_FPU_FEATURE(dc, VIS1);
3849
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3850
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3851
                    gen_helper_fmul8x16al();
3852
                    gen_op_store_DT0_fpr(DFPREG(rd));
3853
                    break;
3854
                case 0x036: /* VIS I fmul8sux16 */
3855
                    CHECK_FPU_FEATURE(dc, VIS1);
3856
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3857
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3858
                    gen_helper_fmul8sux16();
3859
                    gen_op_store_DT0_fpr(DFPREG(rd));
3860
                    break;
3861
                case 0x037: /* VIS I fmul8ulx16 */
3862
                    CHECK_FPU_FEATURE(dc, VIS1);
3863
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3864
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3865
                    gen_helper_fmul8ulx16();
3866
                    gen_op_store_DT0_fpr(DFPREG(rd));
3867
                    break;
3868
                case 0x038: /* VIS I fmuld8sux16 */
3869
                    CHECK_FPU_FEATURE(dc, VIS1);
3870
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3871
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3872
                    gen_helper_fmuld8sux16();
3873
                    gen_op_store_DT0_fpr(DFPREG(rd));
3874
                    break;
3875
                case 0x039: /* VIS I fmuld8ulx16 */
3876
                    CHECK_FPU_FEATURE(dc, VIS1);
3877
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3878
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3879
                    gen_helper_fmuld8ulx16();
3880
                    gen_op_store_DT0_fpr(DFPREG(rd));
3881
                    break;
3882
                case 0x03a: /* VIS I fpack32 */
3883
                case 0x03b: /* VIS I fpack16 */
3884
                case 0x03d: /* VIS I fpackfix */
3885
                case 0x03e: /* VIS I pdist */
3886
                    // XXX
3887
                    goto illegal_insn;
3888
                case 0x048: /* VIS I faligndata */
3889
                    CHECK_FPU_FEATURE(dc, VIS1);
3890
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3891
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3892
                    gen_helper_faligndata();
3893
                    gen_op_store_DT0_fpr(DFPREG(rd));
3894
                    break;
3895
                case 0x04b: /* VIS I fpmerge */
3896
                    CHECK_FPU_FEATURE(dc, VIS1);
3897
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3898
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3899
                    gen_helper_fpmerge();
3900
                    gen_op_store_DT0_fpr(DFPREG(rd));
3901
                    break;
3902
                case 0x04c: /* VIS II bshuffle */
3903
                    // XXX
3904
                    goto illegal_insn;
3905
                case 0x04d: /* VIS I fexpand */
3906
                    CHECK_FPU_FEATURE(dc, VIS1);
3907
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3908
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3909
                    gen_helper_fexpand();
3910
                    gen_op_store_DT0_fpr(DFPREG(rd));
3911
                    break;
3912
                case 0x050: /* VIS I fpadd16 */
3913
                    CHECK_FPU_FEATURE(dc, VIS1);
3914
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3915
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3916
                    gen_helper_fpadd16();
3917
                    gen_op_store_DT0_fpr(DFPREG(rd));
3918
                    break;
3919
                case 0x051: /* VIS I fpadd16s */
3920
                    CHECK_FPU_FEATURE(dc, VIS1);
3921
                    gen_helper_fpadd16s(cpu_fpr[rd],
3922
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3923
                    break;
3924
                case 0x052: /* VIS I fpadd32 */
3925
                    CHECK_FPU_FEATURE(dc, VIS1);
3926
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3927
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3928
                    gen_helper_fpadd32();
3929
                    gen_op_store_DT0_fpr(DFPREG(rd));
3930
                    break;
3931
                case 0x053: /* VIS I fpadd32s */
3932
                    CHECK_FPU_FEATURE(dc, VIS1);
3933
                    gen_helper_fpadd32s(cpu_fpr[rd],
3934
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3935
                    break;
3936
                case 0x054: /* VIS I fpsub16 */
3937
                    CHECK_FPU_FEATURE(dc, VIS1);
3938
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3939
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3940
                    gen_helper_fpsub16();
3941
                    gen_op_store_DT0_fpr(DFPREG(rd));
3942
                    break;
3943
                case 0x055: /* VIS I fpsub16s */
3944
                    CHECK_FPU_FEATURE(dc, VIS1);
3945
                    gen_helper_fpsub16s(cpu_fpr[rd],
3946
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3947
                    break;
3948
                case 0x056: /* VIS I fpsub32 */
3949
                    CHECK_FPU_FEATURE(dc, VIS1);
3950
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3951
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3952
                    gen_helper_fpsub32();
3953
                    gen_op_store_DT0_fpr(DFPREG(rd));
3954
                    break;
3955
                case 0x057: /* VIS I fpsub32s */
3956
                    CHECK_FPU_FEATURE(dc, VIS1);
3957
                    gen_helper_fpsub32s(cpu_fpr[rd],
3958
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3959
                    break;
3960
                case 0x060: /* VIS I fzero */
3961
                    CHECK_FPU_FEATURE(dc, VIS1);
3962
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3963
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3964
                    break;
3965
                case 0x061: /* VIS I fzeros */
3966
                    CHECK_FPU_FEATURE(dc, VIS1);
3967
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
3968
                    break;
3969
                case 0x062: /* VIS I fnor */
3970
                    CHECK_FPU_FEATURE(dc, VIS1);
3971
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3972
                                    cpu_fpr[DFPREG(rs2)]);
3973
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3974
                                    cpu_fpr[DFPREG(rs2) + 1]);
3975
                    break;
3976
                case 0x063: /* VIS I fnors */
3977
                    CHECK_FPU_FEATURE(dc, VIS1);
3978
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3979
                    break;
3980
                case 0x064: /* VIS I fandnot2 */
3981
                    CHECK_FPU_FEATURE(dc, VIS1);
3982
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3983
                                     cpu_fpr[DFPREG(rs2)]);
3984
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3985
                                     cpu_fpr[DFPREG(rs1) + 1],
3986
                                     cpu_fpr[DFPREG(rs2) + 1]);
3987
                    break;
3988
                case 0x065: /* VIS I fandnot2s */
3989
                    CHECK_FPU_FEATURE(dc, VIS1);
3990
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3991
                    break;
3992
                case 0x066: /* VIS I fnot2 */
3993
                    CHECK_FPU_FEATURE(dc, VIS1);
3994
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3995
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3996
                                    cpu_fpr[DFPREG(rs2) + 1]);
3997
                    break;
3998
                case 0x067: /* VIS I fnot2s */
3999
                    CHECK_FPU_FEATURE(dc, VIS1);
4000
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4001
                    break;
4002
                case 0x068: /* VIS I fandnot1 */
4003
                    CHECK_FPU_FEATURE(dc, VIS1);
4004
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4005
                                     cpu_fpr[DFPREG(rs1)]);
4006
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4007
                                     cpu_fpr[DFPREG(rs2) + 1],
4008
                                     cpu_fpr[DFPREG(rs1) + 1]);
4009
                    break;
4010
                case 0x069: /* VIS I fandnot1s */
4011
                    CHECK_FPU_FEATURE(dc, VIS1);
4012
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4013
                    break;
4014
                case 0x06a: /* VIS I fnot1 */
4015
                    CHECK_FPU_FEATURE(dc, VIS1);
4016
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4017
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4018
                                    cpu_fpr[DFPREG(rs1) + 1]);
4019
                    break;
4020
                case 0x06b: /* VIS I fnot1s */
4021
                    CHECK_FPU_FEATURE(dc, VIS1);
4022
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4023
                    break;
4024
                case 0x06c: /* VIS I fxor */
4025
                    CHECK_FPU_FEATURE(dc, VIS1);
4026
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4027
                                    cpu_fpr[DFPREG(rs2)]);
4028
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4029
                                    cpu_fpr[DFPREG(rs1) + 1],
4030
                                    cpu_fpr[DFPREG(rs2) + 1]);
4031
                    break;
4032
                case 0x06d: /* VIS I fxors */
4033
                    CHECK_FPU_FEATURE(dc, VIS1);
4034
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4035
                    break;
4036
                case 0x06e: /* VIS I fnand */
4037
                    CHECK_FPU_FEATURE(dc, VIS1);
4038
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4039
                                     cpu_fpr[DFPREG(rs2)]);
4040
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4041
                                     cpu_fpr[DFPREG(rs2) + 1]);
4042
                    break;
4043
                case 0x06f: /* VIS I fnands */
4044
                    CHECK_FPU_FEATURE(dc, VIS1);
4045
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4046
                    break;
4047
                case 0x070: /* VIS I fand */
4048
                    CHECK_FPU_FEATURE(dc, VIS1);
4049
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4050
                                    cpu_fpr[DFPREG(rs2)]);
4051
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4052
                                    cpu_fpr[DFPREG(rs1) + 1],
4053
                                    cpu_fpr[DFPREG(rs2) + 1]);
4054
                    break;
4055
                case 0x071: /* VIS I fands */
4056
                    CHECK_FPU_FEATURE(dc, VIS1);
4057
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4058
                    break;
4059
                case 0x072: /* VIS I fxnor */
4060
                    CHECK_FPU_FEATURE(dc, VIS1);
4061
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4062
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4063
                                    cpu_fpr[DFPREG(rs1)]);
4064
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4065
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4066
                                    cpu_fpr[DFPREG(rs1) + 1]);
4067
                    break;
4068
                case 0x073: /* VIS I fxnors */
4069
                    CHECK_FPU_FEATURE(dc, VIS1);
4070
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4071
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4072
                    break;
4073
                case 0x074: /* VIS I fsrc1 */
4074
                    CHECK_FPU_FEATURE(dc, VIS1);
4075
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4076
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4077
                                    cpu_fpr[DFPREG(rs1) + 1]);
4078
                    break;
4079
                case 0x075: /* VIS I fsrc1s */
4080
                    CHECK_FPU_FEATURE(dc, VIS1);
4081
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4082
                    break;
4083
                case 0x076: /* VIS I fornot2 */
4084
                    CHECK_FPU_FEATURE(dc, VIS1);
4085
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4086
                                    cpu_fpr[DFPREG(rs2)]);
4087
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4088
                                    cpu_fpr[DFPREG(rs1) + 1],
4089
                                    cpu_fpr[DFPREG(rs2) + 1]);
4090
                    break;
4091
                case 0x077: /* VIS I fornot2s */
4092
                    CHECK_FPU_FEATURE(dc, VIS1);
4093
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4094
                    break;
4095
                case 0x078: /* VIS I fsrc2 */
4096
                    CHECK_FPU_FEATURE(dc, VIS1);
4097
                    gen_op_load_fpr_DT0(DFPREG(rs2));
4098
                    gen_op_store_DT0_fpr(DFPREG(rd));
4099
                    break;
4100
                case 0x079: /* VIS I fsrc2s */
4101
                    CHECK_FPU_FEATURE(dc, VIS1);
4102
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4103
                    break;
4104
                case 0x07a: /* VIS I fornot1 */
4105
                    CHECK_FPU_FEATURE(dc, VIS1);
4106
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4107
                                    cpu_fpr[DFPREG(rs1)]);
4108
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4109
                                    cpu_fpr[DFPREG(rs2) + 1],
4110
                                    cpu_fpr[DFPREG(rs1) + 1]);
4111
                    break;
4112
                case 0x07b: /* VIS I fornot1s */
4113
                    CHECK_FPU_FEATURE(dc, VIS1);
4114
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4115
                    break;
4116
                case 0x07c: /* VIS I for */
4117
                    CHECK_FPU_FEATURE(dc, VIS1);
4118
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4119
                                   cpu_fpr[DFPREG(rs2)]);
4120
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4121
                                   cpu_fpr[DFPREG(rs1) + 1],
4122
                                   cpu_fpr[DFPREG(rs2) + 1]);
4123
                    break;
4124
                case 0x07d: /* VIS I fors */
4125
                    CHECK_FPU_FEATURE(dc, VIS1);
4126
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4127
                    break;
4128
                case 0x07e: /* VIS I fone */
4129
                    CHECK_FPU_FEATURE(dc, VIS1);
4130
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4131
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4132
                    break;
4133
                case 0x07f: /* VIS I fones */
4134
                    CHECK_FPU_FEATURE(dc, VIS1);
4135
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
4136
                    break;
4137
                case 0x080: /* VIS I shutdown */
4138
                case 0x081: /* VIS II siam */
4139
                    // XXX
4140
                    goto illegal_insn;
4141
                default:
4142
                    goto illegal_insn;
4143
                }
4144
#else
4145
                goto ncp_insn;
4146
#endif
4147
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4148
#ifdef TARGET_SPARC64
4149
                goto illegal_insn;
4150
#else
4151
                goto ncp_insn;
4152
#endif
4153
#ifdef TARGET_SPARC64
4154
            } else if (xop == 0x39) { /* V9 return */
4155
                TCGv_i32 r_const;
4156

    
4157
                save_state(dc, cpu_cond);
4158
                cpu_src1 = get_src1(insn, cpu_src1);
4159
                if (IS_IMM) {   /* immediate */
4160
                    simm = GET_FIELDs(insn, 19, 31);
4161
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4162
                } else {                /* register */
4163
                    rs2 = GET_FIELD(insn, 27, 31);
4164
                    if (rs2) {
4165
                        gen_movl_reg_TN(rs2, cpu_src2);
4166
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4167
                    } else
4168
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4169
                }
4170
                gen_helper_restore();
4171
                gen_mov_pc_npc(dc, cpu_cond);
4172
                r_const = tcg_const_i32(3);
4173
                gen_helper_check_align(cpu_dst, r_const);
4174
                tcg_temp_free_i32(r_const);
4175
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
4176
                dc->npc = DYNAMIC_PC;
4177
                goto jmp_insn;
4178
#endif
4179
            } else {
4180
                cpu_src1 = get_src1(insn, cpu_src1);
4181
                if (IS_IMM) {   /* immediate */
4182
                    simm = GET_FIELDs(insn, 19, 31);
4183
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4184
                } else {                /* register */
4185
                    rs2 = GET_FIELD(insn, 27, 31);
4186
                    if (rs2) {
4187
                        gen_movl_reg_TN(rs2, cpu_src2);
4188
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4189
                    } else
4190
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4191
                }
4192
                switch (xop) {
4193
                case 0x38:      /* jmpl */
4194
                    {
4195
                        TCGv r_pc;
4196
                        TCGv_i32 r_const;
4197

    
4198
                        r_pc = tcg_const_tl(dc->pc);
4199
                        gen_movl_TN_reg(rd, r_pc);
4200
                        tcg_temp_free(r_pc);
4201
                        gen_mov_pc_npc(dc, cpu_cond);
4202
                        r_const = tcg_const_i32(3);
4203
                        gen_helper_check_align(cpu_dst, r_const);
4204
                        tcg_temp_free_i32(r_const);
4205
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
4206
                        dc->npc = DYNAMIC_PC;
4207
                    }
4208
                    goto jmp_insn;
4209
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4210
                case 0x39:      /* rett, V9 return */
4211
                    {
4212
                        TCGv_i32 r_const;
4213

    
4214
                        if (!supervisor(dc))
4215
                            goto priv_insn;
4216
                        gen_mov_pc_npc(dc, cpu_cond);
4217
                        r_const = tcg_const_i32(3);
4218
                        gen_helper_check_align(cpu_dst, r_const);
4219
                        tcg_temp_free_i32(r_const);
4220
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
4221
                        dc->npc = DYNAMIC_PC;
4222
                        gen_helper_rett();
4223
                    }
4224
                    goto jmp_insn;
4225
#endif
4226
                case 0x3b: /* flush */
4227
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4228
                        goto unimp_flush;
4229
                    /* nop */
4230
                    break;
4231
                case 0x3c:      /* save */
4232
                    save_state(dc, cpu_cond);
4233
                    gen_helper_save();
4234
                    gen_movl_TN_reg(rd, cpu_dst);
4235
                    break;
4236
                case 0x3d:      /* restore */
4237
                    save_state(dc, cpu_cond);
4238
                    gen_helper_restore();
4239
                    gen_movl_TN_reg(rd, cpu_dst);
4240
                    break;
4241
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4242
                case 0x3e:      /* V9 done/retry */
4243
                    {
4244
                        switch (rd) {
4245
                        case 0:
4246
                            if (!supervisor(dc))
4247
                                goto priv_insn;
4248
                            dc->npc = DYNAMIC_PC;
4249
                            dc->pc = DYNAMIC_PC;
4250
                            gen_helper_done();
4251
                            goto jmp_insn;
4252
                        case 1:
4253
                            if (!supervisor(dc))
4254
                                goto priv_insn;
4255
                            dc->npc = DYNAMIC_PC;
4256
                            dc->pc = DYNAMIC_PC;
4257
                            gen_helper_retry();
4258
                            goto jmp_insn;
4259
                        default:
4260
                            goto illegal_insn;
4261
                        }
4262
                    }
4263
                    break;
4264
#endif
4265
                default:
4266
                    goto illegal_insn;
4267
                }
4268
            }
4269
            break;
4270
        }
4271
        break;
4272
    case 3:                     /* load/store instructions */
4273
        {
4274
            unsigned int xop = GET_FIELD(insn, 7, 12);
4275

    
4276
            /* flush pending conditional evaluations before exposing
4277
               cpu state */
4278
            if (dc->cc_op != CC_OP_FLAGS) {
4279
                dc->cc_op = CC_OP_FLAGS;
4280
                gen_helper_compute_psr();
4281
            }
4282
            cpu_src1 = get_src1(insn, cpu_src1);
4283
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4284
                rs2 = GET_FIELD(insn, 27, 31);
4285
                gen_movl_reg_TN(rs2, cpu_src2);
4286
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
4287
            } else if (IS_IMM) {     /* immediate */
4288
                simm = GET_FIELDs(insn, 19, 31);
4289
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4290
            } else {            /* register */
4291
                rs2 = GET_FIELD(insn, 27, 31);
4292
                if (rs2 != 0) {
4293
                    gen_movl_reg_TN(rs2, cpu_src2);
4294
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4295
                } else
4296
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
4297
            }
4298
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4299
                (xop > 0x17 && xop <= 0x1d ) ||
4300
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4301
                switch (xop) {
4302
                case 0x0:       /* ld, V9 lduw, load unsigned word */
4303
                    gen_address_mask(dc, cpu_addr);
4304
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4305
                    break;
4306
                case 0x1:       /* ldub, load unsigned byte */
4307
                    gen_address_mask(dc, cpu_addr);
4308
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4309
                    break;
4310
                case 0x2:       /* lduh, load unsigned halfword */
4311
                    gen_address_mask(dc, cpu_addr);
4312
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4313
                    break;
4314
                case 0x3:       /* ldd, load double word */
4315
                    if (rd & 1)
4316
                        goto illegal_insn;
4317
                    else {
4318
                        TCGv_i32 r_const;
4319

    
4320
                        save_state(dc, cpu_cond);
4321
                        r_const = tcg_const_i32(7);
4322
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
4323
                        tcg_temp_free_i32(r_const);
4324
                        gen_address_mask(dc, cpu_addr);
4325
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4326
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4327
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4328
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
4329
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4330
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4331
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4332
                    }
4333
                    break;
4334
                case 0x9:       /* ldsb, load signed byte */
4335
                    gen_address_mask(dc, cpu_addr);
4336
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4337
                    break;
4338
                case 0xa:       /* ldsh, load signed halfword */
4339
                    gen_address_mask(dc, cpu_addr);
4340
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4341
                    break;
4342
                case 0xd:       /* ldstub -- XXX: should be atomically */
4343
                    {
4344
                        TCGv r_const;
4345

    
4346
                        gen_address_mask(dc, cpu_addr);
4347
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4348
                        r_const = tcg_const_tl(0xff);
4349
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4350
                        tcg_temp_free(r_const);
4351
                    }
4352
                    break;
4353
                case 0x0f:      /* swap, swap register with memory. Also
4354
                                   atomically */
4355
                    CHECK_IU_FEATURE(dc, SWAP);
4356
                    gen_movl_reg_TN(rd, cpu_val);
4357
                    gen_address_mask(dc, cpu_addr);
4358
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4359
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4360
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4361
                    break;
4362
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4363
                case 0x10:      /* lda, V9 lduwa, load word alternate */
4364
#ifndef TARGET_SPARC64
4365
                    if (IS_IMM)
4366
                        goto illegal_insn;
4367
                    if (!supervisor(dc))
4368
                        goto priv_insn;
4369
#endif
4370
                    save_state(dc, cpu_cond);
4371
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4372
                    break;
4373
                case 0x11:      /* lduba, load unsigned byte alternate */
4374
#ifndef TARGET_SPARC64
4375
                    if (IS_IMM)
4376
                        goto illegal_insn;
4377
                    if (!supervisor(dc))
4378
                        goto priv_insn;
4379
#endif
4380
                    save_state(dc, cpu_cond);
4381
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4382
                    break;
4383
                case 0x12:      /* lduha, load unsigned halfword alternate */
4384
#ifndef TARGET_SPARC64
4385
                    if (IS_IMM)
4386
                        goto illegal_insn;
4387
                    if (!supervisor(dc))
4388
                        goto priv_insn;
4389
#endif
4390
                    save_state(dc, cpu_cond);
4391
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4392
                    break;
4393
                case 0x13:      /* ldda, load double word alternate */
4394
#ifndef TARGET_SPARC64
4395
                    if (IS_IMM)
4396
                        goto illegal_insn;
4397
                    if (!supervisor(dc))
4398
                        goto priv_insn;
4399
#endif
4400
                    if (rd & 1)
4401
                        goto illegal_insn;
4402
                    save_state(dc, cpu_cond);
4403
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4404
                    goto skip_move;
4405
                case 0x19:      /* ldsba, load signed byte alternate */
4406
#ifndef TARGET_SPARC64
4407
                    if (IS_IMM)
4408
                        goto illegal_insn;
4409
                    if (!supervisor(dc))
4410
                        goto priv_insn;
4411
#endif
4412
                    save_state(dc, cpu_cond);
4413
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4414
                    break;
4415
                case 0x1a:      /* ldsha, load signed halfword alternate */
4416
#ifndef TARGET_SPARC64
4417
                    if (IS_IMM)
4418
                        goto illegal_insn;
4419
                    if (!supervisor(dc))
4420
                        goto priv_insn;
4421
#endif
4422
                    save_state(dc, cpu_cond);
4423
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4424
                    break;
4425
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
4426
#ifndef TARGET_SPARC64
4427
                    if (IS_IMM)
4428
                        goto illegal_insn;
4429
                    if (!supervisor(dc))
4430
                        goto priv_insn;
4431
#endif
4432
                    save_state(dc, cpu_cond);
4433
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
4434
                    break;
4435
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
4436
                                   atomically */
4437
                    CHECK_IU_FEATURE(dc, SWAP);
4438
#ifndef TARGET_SPARC64
4439
                    if (IS_IMM)
4440
                        goto illegal_insn;
4441
                    if (!supervisor(dc))
4442
                        goto priv_insn;
4443
#endif
4444
                    save_state(dc, cpu_cond);
4445
                    gen_movl_reg_TN(rd, cpu_val);
4446
                    gen_swap_asi(cpu_val, cpu_addr, insn);
4447
                    break;
4448

    
4449
#ifndef TARGET_SPARC64
4450
                case 0x30: /* ldc */
4451
                case 0x31: /* ldcsr */
4452
                case 0x33: /* lddc */
4453
                    goto ncp_insn;
4454
#endif
4455
#endif
4456
#ifdef TARGET_SPARC64
4457
                case 0x08: /* V9 ldsw */
4458
                    gen_address_mask(dc, cpu_addr);
4459
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4460
                    break;
4461
                case 0x0b: /* V9 ldx */
4462
                    gen_address_mask(dc, cpu_addr);
4463
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4464
                    break;
4465
                case 0x18: /* V9 ldswa */
4466
                    save_state(dc, cpu_cond);
4467
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4468
                    break;
4469
                case 0x1b: /* V9 ldxa */
4470
                    save_state(dc, cpu_cond);
4471
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4472
                    break;
4473
                case 0x2d: /* V9 prefetch, no effect */
4474
                    goto skip_move;
4475
                case 0x30: /* V9 ldfa */
4476
                    save_state(dc, cpu_cond);
4477
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
4478
                    goto skip_move;
4479
                case 0x33: /* V9 lddfa */
4480
                    save_state(dc, cpu_cond);
4481
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4482
                    goto skip_move;
4483
                case 0x3d: /* V9 prefetcha, no effect */
4484
                    goto skip_move;
4485
                case 0x32: /* V9 ldqfa */
4486
                    CHECK_FPU_FEATURE(dc, FLOAT128);
4487
                    save_state(dc, cpu_cond);
4488
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4489
                    goto skip_move;
4490
#endif
4491
                default:
4492
                    goto illegal_insn;
4493
                }
4494
                gen_movl_TN_reg(rd, cpu_val);
4495
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4496
            skip_move: ;
4497
#endif
4498
            } else if (xop >= 0x20 && xop < 0x24) {
4499
                if (gen_trap_ifnofpu(dc, cpu_cond))
4500
                    goto jmp_insn;
4501
                save_state(dc, cpu_cond);
4502
                switch (xop) {
4503
                case 0x20:      /* ldf, load fpreg */
4504
                    gen_address_mask(dc, cpu_addr);
4505
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4506
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4507
                    break;
4508
                case 0x21:      /* ldfsr, V9 ldxfsr */
4509
#ifdef TARGET_SPARC64
4510
                    gen_address_mask(dc, cpu_addr);
4511
                    if (rd == 1) {
4512
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4513
                        gen_helper_ldxfsr(cpu_tmp64);
4514
                    } else {
4515
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4516
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4517
                        gen_helper_ldfsr(cpu_tmp32);
4518
                    }
4519
#else
4520
                    {
4521
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4522
                        gen_helper_ldfsr(cpu_tmp32);
4523
                    }
4524
#endif
4525
                    break;
4526
                case 0x22:      /* ldqf, load quad fpreg */
4527
                    {
4528
                        TCGv_i32 r_const;
4529

    
4530
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4531
                        r_const = tcg_const_i32(dc->mem_idx);
4532
                        gen_address_mask(dc, cpu_addr);
4533
                        gen_helper_ldqf(cpu_addr, r_const);
4534
                        tcg_temp_free_i32(r_const);
4535
                        gen_op_store_QT0_fpr(QFPREG(rd));
4536
                    }
4537
                    break;
4538
                case 0x23:      /* lddf, load double fpreg */
4539
                    {
4540
                        TCGv_i32 r_const;
4541

    
4542
                        r_const = tcg_const_i32(dc->mem_idx);
4543
                        gen_address_mask(dc, cpu_addr);
4544
                        gen_helper_lddf(cpu_addr, r_const);
4545
                        tcg_temp_free_i32(r_const);
4546
                        gen_op_store_DT0_fpr(DFPREG(rd));
4547
                    }
4548
                    break;
4549
                default:
4550
                    goto illegal_insn;
4551
                }
4552
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4553
                       xop == 0xe || xop == 0x1e) {
4554
                gen_movl_reg_TN(rd, cpu_val);
4555
                switch (xop) {
4556
                case 0x4: /* st, store word */
4557
                    gen_address_mask(dc, cpu_addr);
4558
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4559
                    break;
4560
                case 0x5: /* stb, store byte */
4561
                    gen_address_mask(dc, cpu_addr);
4562
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4563
                    break;
4564
                case 0x6: /* sth, store halfword */
4565
                    gen_address_mask(dc, cpu_addr);
4566
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4567
                    break;
4568
                case 0x7: /* std, store double word */
4569
                    if (rd & 1)
4570
                        goto illegal_insn;
4571
                    else {
4572
                        TCGv_i32 r_const;
4573

    
4574
                        save_state(dc, cpu_cond);
4575
                        gen_address_mask(dc, cpu_addr);
4576
                        r_const = tcg_const_i32(7);
4577
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
4578
                        tcg_temp_free_i32(r_const);
4579
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
4580
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4581
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4582
                    }
4583
                    break;
4584
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4585
                case 0x14: /* sta, V9 stwa, store word alternate */
4586
#ifndef TARGET_SPARC64
4587
                    if (IS_IMM)
4588
                        goto illegal_insn;
4589
                    if (!supervisor(dc))
4590
                        goto priv_insn;
4591
#endif
4592
                    save_state(dc, cpu_cond);
4593
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
4594
                    dc->npc = DYNAMIC_PC;
4595
                    break;
4596
                case 0x15: /* stba, store byte alternate */
4597
#ifndef TARGET_SPARC64
4598
                    if (IS_IMM)
4599
                        goto illegal_insn;
4600
                    if (!supervisor(dc))
4601
                        goto priv_insn;
4602
#endif
4603
                    save_state(dc, cpu_cond);
4604
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
4605
                    dc->npc = DYNAMIC_PC;
4606
                    break;
4607
                case 0x16: /* stha, store halfword alternate */
4608
#ifndef TARGET_SPARC64
4609
                    if (IS_IMM)
4610
                        goto illegal_insn;
4611
                    if (!supervisor(dc))
4612
                        goto priv_insn;
4613
#endif
4614
                    save_state(dc, cpu_cond);
4615
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
4616
                    dc->npc = DYNAMIC_PC;
4617
                    break;
4618
                case 0x17: /* stda, store double word alternate */
4619
#ifndef TARGET_SPARC64
4620
                    if (IS_IMM)
4621
                        goto illegal_insn;
4622
                    if (!supervisor(dc))
4623
                        goto priv_insn;
4624
#endif
4625
                    if (rd & 1)
4626
                        goto illegal_insn;
4627
                    else {
4628
                        save_state(dc, cpu_cond);
4629
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4630
                    }
4631
                    break;
4632
#endif
4633
#ifdef TARGET_SPARC64
4634
                case 0x0e: /* V9 stx */
4635
                    gen_address_mask(dc, cpu_addr);
4636
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4637
                    break;
4638
                case 0x1e: /* V9 stxa */
4639
                    save_state(dc, cpu_cond);
4640
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
4641
                    dc->npc = DYNAMIC_PC;
4642
                    break;
4643
#endif
4644
                default:
4645
                    goto illegal_insn;
4646
                }
4647
            } else if (xop > 0x23 && xop < 0x28) {
4648
                if (gen_trap_ifnofpu(dc, cpu_cond))
4649
                    goto jmp_insn;
4650
                save_state(dc, cpu_cond);
4651
                switch (xop) {
4652
                case 0x24: /* stf, store fpreg */
4653
                    gen_address_mask(dc, cpu_addr);
4654
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4655
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4656
                    break;
4657
                case 0x25: /* stfsr, V9 stxfsr */
4658
#ifdef TARGET_SPARC64
4659
                    gen_address_mask(dc, cpu_addr);
4660
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4661
                    if (rd == 1)
4662
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4663
                    else
4664
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4665
#else
4666
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4667
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4668
#endif
4669
                    break;
4670
                case 0x26:
4671
#ifdef TARGET_SPARC64
4672
                    /* V9 stqf, store quad fpreg */
4673
                    {
4674
                        TCGv_i32 r_const;
4675

    
4676
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4677
                        gen_op_load_fpr_QT0(QFPREG(rd));
4678
                        r_const = tcg_const_i32(dc->mem_idx);
4679
                        gen_address_mask(dc, cpu_addr);
4680
                        gen_helper_stqf(cpu_addr, r_const);
4681
                        tcg_temp_free_i32(r_const);
4682
                    }
4683
                    break;
4684
#else /* !TARGET_SPARC64 */
4685
                    /* stdfq, store floating point queue */
4686
#if defined(CONFIG_USER_ONLY)
4687
                    goto illegal_insn;
4688
#else
4689
                    if (!supervisor(dc))
4690
                        goto priv_insn;
4691
                    if (gen_trap_ifnofpu(dc, cpu_cond))
4692
                        goto jmp_insn;
4693
                    goto nfq_insn;
4694
#endif
4695
#endif
4696
                case 0x27: /* stdf, store double fpreg */
4697
                    {
4698
                        TCGv_i32 r_const;
4699

    
4700
                        gen_op_load_fpr_DT0(DFPREG(rd));
4701
                        r_const = tcg_const_i32(dc->mem_idx);
4702
                        gen_address_mask(dc, cpu_addr);
4703
                        gen_helper_stdf(cpu_addr, r_const);
4704
                        tcg_temp_free_i32(r_const);
4705
                    }
4706
                    break;
4707
                default:
4708
                    goto illegal_insn;
4709
                }
4710
            } else if (xop > 0x33 && xop < 0x3f) {
4711
                save_state(dc, cpu_cond);
4712
                switch (xop) {
4713
#ifdef TARGET_SPARC64
4714
                case 0x34: /* V9 stfa */
4715
                    gen_stf_asi(cpu_addr, insn, 4, rd);
4716
                    break;
4717
                case 0x36: /* V9 stqfa */
4718
                    {
4719
                        TCGv_i32 r_const;
4720

    
4721
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4722
                        r_const = tcg_const_i32(7);
4723
                        gen_helper_check_align(cpu_addr, r_const);
4724
                        tcg_temp_free_i32(r_const);
4725
                        gen_op_load_fpr_QT0(QFPREG(rd));
4726
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4727
                    }
4728
                    break;
4729
                case 0x37: /* V9 stdfa */
4730
                    gen_op_load_fpr_DT0(DFPREG(rd));
4731
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4732
                    break;
4733
                case 0x3c: /* V9 casa */
4734
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4735
                    gen_movl_TN_reg(rd, cpu_val);
4736
                    break;
4737
                case 0x3e: /* V9 casxa */
4738
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4739
                    gen_movl_TN_reg(rd, cpu_val);
4740
                    break;
4741
#else
4742
                case 0x34: /* stc */
4743
                case 0x35: /* stcsr */
4744
                case 0x36: /* stdcq */
4745
                case 0x37: /* stdc */
4746
                    goto ncp_insn;
4747
#endif
4748
                default:
4749
                    goto illegal_insn;
4750
                }
4751
            } else
4752
                goto illegal_insn;
4753
        }
4754
        break;
4755
    }
4756
    /* default case for non jump instructions */
4757
    if (dc->npc == DYNAMIC_PC) {
4758
        dc->pc = DYNAMIC_PC;
4759
        gen_op_next_insn();
4760
    } else if (dc->npc == JUMP_PC) {
4761
        /* we can do a static jump */
4762
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4763
        dc->is_br = 1;
4764
    } else {
4765
        dc->pc = dc->npc;
4766
        dc->npc = dc->npc + 4;
4767
    }
4768
 jmp_insn:
4769
    goto egress;
4770
 illegal_insn:
4771
    {
4772
        TCGv_i32 r_const;
4773

    
4774
        save_state(dc, cpu_cond);
4775
        r_const = tcg_const_i32(TT_ILL_INSN);
4776
        gen_helper_raise_exception(r_const);
4777
        tcg_temp_free_i32(r_const);
4778
        dc->is_br = 1;
4779
    }
4780
    goto egress;
4781
 unimp_flush:
4782
    {
4783
        TCGv_i32 r_const;
4784

    
4785
        save_state(dc, cpu_cond);
4786
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4787
        gen_helper_raise_exception(r_const);
4788
        tcg_temp_free_i32(r_const);
4789
        dc->is_br = 1;
4790
    }
4791
    goto egress;
4792
#if !defined(CONFIG_USER_ONLY)
4793
 priv_insn:
4794
    {
4795
        TCGv_i32 r_const;
4796

    
4797
        save_state(dc, cpu_cond);
4798
        r_const = tcg_const_i32(TT_PRIV_INSN);
4799
        gen_helper_raise_exception(r_const);
4800
        tcg_temp_free_i32(r_const);
4801
        dc->is_br = 1;
4802
    }
4803
    goto egress;
4804
#endif
4805
 nfpu_insn:
4806
    save_state(dc, cpu_cond);
4807
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4808
    dc->is_br = 1;
4809
    goto egress;
4810
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4811
 nfq_insn:
4812
    save_state(dc, cpu_cond);
4813
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4814
    dc->is_br = 1;
4815
    goto egress;
4816
#endif
4817
#ifndef TARGET_SPARC64
4818
 ncp_insn:
4819
    {
4820
        TCGv r_const;
4821

    
4822
        save_state(dc, cpu_cond);
4823
        r_const = tcg_const_i32(TT_NCP_INSN);
4824
        gen_helper_raise_exception(r_const);
4825
        tcg_temp_free(r_const);
4826
        dc->is_br = 1;
4827
    }
4828
    goto egress;
4829
#endif
4830
 egress:
4831
    tcg_temp_free(cpu_tmp1);
4832
    tcg_temp_free(cpu_tmp2);
4833
}
4834

    
4835
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4836
                                                  int spc, CPUSPARCState *env)
4837
{
4838
    target_ulong pc_start, last_pc;
4839
    uint16_t *gen_opc_end;
4840
    DisasContext dc1, *dc = &dc1;
4841
    CPUBreakpoint *bp;
4842
    int j, lj = -1;
4843
    int num_insns;
4844
    int max_insns;
4845

    
4846
    memset(dc, 0, sizeof(DisasContext));
4847
    dc->tb = tb;
4848
    pc_start = tb->pc;
4849
    dc->pc = pc_start;
4850
    last_pc = dc->pc;
4851
    dc->npc = (target_ulong) tb->cs_base;
4852
    dc->cc_op = CC_OP_DYNAMIC;
4853
    dc->mem_idx = cpu_mmu_index(env);
4854
    dc->def = env->def;
4855
    if ((dc->def->features & CPU_FEATURE_FLOAT))
4856
        dc->fpu_enabled = cpu_fpu_enabled(env);
4857
    else
4858
        dc->fpu_enabled = 0;
4859
#ifdef TARGET_SPARC64
4860
    dc->address_mask_32bit = env->pstate & PS_AM;
4861
#endif
4862
    dc->singlestep = (env->singlestep_enabled || singlestep);
4863
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4864

    
4865
    cpu_tmp0 = tcg_temp_new();
4866
    cpu_tmp32 = tcg_temp_new_i32();
4867
    cpu_tmp64 = tcg_temp_new_i64();
4868

    
4869
    cpu_dst = tcg_temp_local_new();
4870

    
4871
    // loads and stores
4872
    cpu_val = tcg_temp_local_new();
4873
    cpu_addr = tcg_temp_local_new();
4874

    
4875
    num_insns = 0;
4876
    max_insns = tb->cflags & CF_COUNT_MASK;
4877
    if (max_insns == 0)
4878
        max_insns = CF_COUNT_MASK;
4879
    gen_icount_start();
4880
    do {
4881
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4882
            QTAILQ_FOREACH(bp, &