target-sparc / translate.c @ e87b7cb0
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;

// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

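/* Register accessors: %g0 always reads as zero, globals %g1-%g7 live in
   the TCG globals cpu_gregs[], and the current window's registers (reg 8
   and up) are accessed indirectly through cpu_regwptr.  */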
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

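/* Emit a jump to (pc, npc).  If both targets lie on the same guest page as
   the current TB and we are not single-stepping, chain directly to the next
   TB; otherwise just update cpu_pc/cpu_npc and exit with exit_tb(0).  */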
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}

// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

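/* Recover the 32-bit carry left behind by a previous add or subtract:
   an add carried out iff the truncated result is unsigned-less-than the
   first operand, a subtract borrowed iff src1 is unsigned-less-than src2.  */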
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

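/* ADDX/ADDC: add with the carry from the last CC-setting operation.  The
   carry is recovered cheaply when dc->cc_op identifies the producing op,
   and via the compute_C_icc helper otherwise.  */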
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

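/* SUBX/SUBC: the subtract-with-borrow counterpart of gen_op_addx_int.  */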
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

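/* MULScc: one step of the V8 multiply-step instruction, operating on %y,
   the condition codes and cpu_cc_src/cpu_cc_src2 as described by the
   inline comments below.  */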
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

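/* 32x32 -> 64 bit multiply for UMUL/SMUL: the 64-bit product is written to
   dst (truncated to the target word size) and its high 32 bits are also
   copied into %y.  */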
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}

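/* 64-bit divide helpers (V9 SDIVX): trap on a zero divisor and special-case
   INT64_MIN / -1, whose quotient cannot be represented, by pinning the
   result to INT64_MIN.  */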
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif

747
// 1
748
static inline void gen_op_eval_ba(TCGv dst)
749
{
750
    tcg_gen_movi_tl(dst, 1);
751
}
752

    
753
// Z
754
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
755
{
756
    gen_mov_reg_Z(dst, src);
757
}
758

    
759
// Z | (N ^ V)
760
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
761
{
762
    gen_mov_reg_N(cpu_tmp0, src);
763
    gen_mov_reg_V(dst, src);
764
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
765
    gen_mov_reg_Z(cpu_tmp0, src);
766
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
767
}
768

    
769
// N ^ V
770
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
771
{
772
    gen_mov_reg_V(cpu_tmp0, src);
773
    gen_mov_reg_N(dst, src);
774
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
775
}
776

    
777
// C | Z
778
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
779
{
780
    gen_mov_reg_Z(cpu_tmp0, src);
781
    gen_mov_reg_C(dst, src);
782
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
783
}
784

    
785
// C
786
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
787
{
788
    gen_mov_reg_C(dst, src);
789
}
790

    
791
// V
792
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
793
{
794
    gen_mov_reg_V(dst, src);
795
}
796

    
797
// 0
798
static inline void gen_op_eval_bn(TCGv dst)
799
{
800
    tcg_gen_movi_tl(dst, 0);
801
}
802

    
803
// N
804
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
805
{
806
    gen_mov_reg_N(dst, src);
807
}
808

    
809
// !Z
810
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
811
{
812
    gen_mov_reg_Z(dst, src);
813
    tcg_gen_xori_tl(dst, dst, 0x1);
814
}
815

    
816
// !(Z | (N ^ V))
817
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
818
{
819
    gen_mov_reg_N(cpu_tmp0, src);
820
    gen_mov_reg_V(dst, src);
821
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
822
    gen_mov_reg_Z(cpu_tmp0, src);
823
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
824
    tcg_gen_xori_tl(dst, dst, 0x1);
825
}
826

    
827
// !(N ^ V)
828
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
829
{
830
    gen_mov_reg_V(cpu_tmp0, src);
831
    gen_mov_reg_N(dst, src);
832
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
833
    tcg_gen_xori_tl(dst, dst, 0x1);
834
}
835

    
836
// !(C | Z)
837
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
838
{
839
    gen_mov_reg_Z(cpu_tmp0, src);
840
    gen_mov_reg_C(dst, src);
841
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
842
    tcg_gen_xori_tl(dst, dst, 0x1);
843
}
844

    
845
// !C
846
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
847
{
848
    gen_mov_reg_C(dst, src);
849
    tcg_gen_xori_tl(dst, dst, 0x1);
850
}
851

    
852
// !N
853
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
854
{
855
    gen_mov_reg_N(dst, src);
856
    tcg_gen_xori_tl(dst, dst, 0x1);
857
}
858

    
859
// !V
860
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
861
{
862
    gen_mov_reg_V(dst, src);
863
    tcg_gen_xori_tl(dst, dst, 0x1);
864
}
865

    
866
/*
867
  FPSR bit field FCC1 | FCC0:
868
   0 =
869
   1 <
870
   2 >
871
   3 unordered
872
*/
873
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
874
                                    unsigned int fcc_offset)
875
{
876
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
877
    tcg_gen_andi_tl(reg, reg, 0x1);
878
}
879

    
880
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
881
                                    unsigned int fcc_offset)
882
{
883
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
884
    tcg_gen_andi_tl(reg, reg, 0x1);
885
}
886

    
887
// !0: FCC0 | FCC1
888
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
889
                                    unsigned int fcc_offset)
890
{
891
    gen_mov_reg_FCC0(dst, src, fcc_offset);
892
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
893
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
894
}
895

    
896
// 1 or 2: FCC0 ^ FCC1
897
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
898
                                    unsigned int fcc_offset)
899
{
900
    gen_mov_reg_FCC0(dst, src, fcc_offset);
901
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
902
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
903
}
904

    
905
// 1 or 3: FCC0
906
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
907
                                    unsigned int fcc_offset)
908
{
909
    gen_mov_reg_FCC0(dst, src, fcc_offset);
910
}
911

    
912
// 1: FCC0 & !FCC1
913
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
914
                                    unsigned int fcc_offset)
915
{
916
    gen_mov_reg_FCC0(dst, src, fcc_offset);
917
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
918
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
919
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
920
}
921

    
922
// 2 or 3: FCC1
923
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
924
                                    unsigned int fcc_offset)
925
{
926
    gen_mov_reg_FCC1(dst, src, fcc_offset);
927
}
928

    
929
// 2: !FCC0 & FCC1
930
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
931
                                    unsigned int fcc_offset)
932
{
933
    gen_mov_reg_FCC0(dst, src, fcc_offset);
934
    tcg_gen_xori_tl(dst, dst, 0x1);
935
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
936
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
937
}
938

    
939
// 3: FCC0 & FCC1
940
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
941
                                    unsigned int fcc_offset)
942
{
943
    gen_mov_reg_FCC0(dst, src, fcc_offset);
944
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
945
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
946
}
947

    
948
// 0: !(FCC0 | FCC1)
949
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
950
                                    unsigned int fcc_offset)
951
{
952
    gen_mov_reg_FCC0(dst, src, fcc_offset);
953
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
954
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
955
    tcg_gen_xori_tl(dst, dst, 0x1);
956
}
957

    
958
// 0 or 3: !(FCC0 ^ FCC1)
959
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
960
                                    unsigned int fcc_offset)
961
{
962
    gen_mov_reg_FCC0(dst, src, fcc_offset);
963
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
964
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
965
    tcg_gen_xori_tl(dst, dst, 0x1);
966
}
967

    
968
// 0 or 2: !FCC0
969
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
970
                                    unsigned int fcc_offset)
971
{
972
    gen_mov_reg_FCC0(dst, src, fcc_offset);
973
    tcg_gen_xori_tl(dst, dst, 0x1);
974
}
975

    
976
// !1: !(FCC0 & !FCC1)
977
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
978
                                    unsigned int fcc_offset)
979
{
980
    gen_mov_reg_FCC0(dst, src, fcc_offset);
981
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
982
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
983
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
984
    tcg_gen_xori_tl(dst, dst, 0x1);
985
}
986

    
987
// 0 or 1: !FCC1
988
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
989
                                    unsigned int fcc_offset)
990
{
991
    gen_mov_reg_FCC1(dst, src, fcc_offset);
992
    tcg_gen_xori_tl(dst, dst, 0x1);
993
}
994

    
995
// !2: !(!FCC0 & FCC1)
996
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
997
                                    unsigned int fcc_offset)
998
{
999
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1000
    tcg_gen_xori_tl(dst, dst, 0x1);
1001
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1002
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1003
    tcg_gen_xori_tl(dst, dst, 0x1);
1004
}
1005

    
1006
// !3: !(FCC0 & FCC1)
1007
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1008
                                    unsigned int fcc_offset)
1009
{
1010
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1011
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1012
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1013
    tcg_gen_xori_tl(dst, dst, 0x1);
1014
}
1015

    
1016
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1017
                               target_ulong pc2, TCGv r_cond)
1018
{
1019
    int l1;
1020

    
1021
    l1 = gen_new_label();
1022

    
1023
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1024

    
1025
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
1026

    
1027
    gen_set_label(l1);
1028
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
1029
}
1030

    
1031
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1032
                                target_ulong pc2, TCGv r_cond)
1033
{
1034
    int l1;
1035

    
1036
    l1 = gen_new_label();
1037

    
1038
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1039

    
1040
    gen_goto_tb(dc, 0, pc2, pc1);
1041

    
1042
    gen_set_label(l1);
1043
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1044
}
1045

    
1046
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1047
                                      TCGv r_cond)
1048
{
1049
    int l1, l2;
1050

    
1051
    l1 = gen_new_label();
1052
    l2 = gen_new_label();
1053

    
1054
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1055

    
1056
    tcg_gen_movi_tl(cpu_npc, npc1);
1057
    tcg_gen_br(l2);
1058

    
1059
    gen_set_label(l1);
1060
    tcg_gen_movi_tl(cpu_npc, npc2);
1061
    gen_set_label(l2);
1062
}
1063

    
1064
/* call this function before using the condition register as it may
1065
   have been set for a jump */
1066
static inline void flush_cond(DisasContext *dc, TCGv cond)
1067
{
1068
    if (dc->npc == JUMP_PC) {
1069
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1070
        dc->npc = DYNAMIC_PC;
1071
    }
1072
}
1073

    
1074
static inline void save_npc(DisasContext *dc, TCGv cond)
1075
{
1076
    if (dc->npc == JUMP_PC) {
1077
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1078
        dc->npc = DYNAMIC_PC;
1079
    } else if (dc->npc != DYNAMIC_PC) {
1080
        tcg_gen_movi_tl(cpu_npc, dc->npc);
1081
    }
1082
}
1083

    
1084
static inline void save_state(DisasContext *dc, TCGv cond)
1085
{
1086
    tcg_gen_movi_tl(cpu_pc, dc->pc);
1087
    /* flush pending conditional evaluations before exposing cpu state */
1088
    if (dc->cc_op != CC_OP_FLAGS) {
1089
        dc->cc_op = CC_OP_FLAGS;
1090
        gen_helper_compute_psr();
1091
    }
1092
    save_npc(dc, cond);
1093
}
1094

    
1095
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1096
{
1097
    if (dc->npc == JUMP_PC) {
1098
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1099
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1100
        dc->pc = DYNAMIC_PC;
1101
    } else if (dc->npc == DYNAMIC_PC) {
1102
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1103
        dc->pc = DYNAMIC_PC;
1104
    } else {
1105
        dc->pc = dc->npc;
1106
    }
1107
}
1108

    
1109
static inline void gen_op_next_insn(void)
1110
{
1111
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
1112
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1113
}
1114

    
1115
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1116
                            DisasContext *dc)
1117
{
1118
    TCGv_i32 r_src;
1119

    
1120
#ifdef TARGET_SPARC64
1121
    if (cc)
1122
        r_src = cpu_xcc;
1123
    else
1124
        r_src = cpu_psr;
1125
#else
1126
    r_src = cpu_psr;
1127
#endif
1128
    switch (dc->cc_op) {
1129
    case CC_OP_FLAGS:
1130
        break;
1131
    default:
1132
        gen_helper_compute_psr();
1133
        dc->cc_op = CC_OP_FLAGS;
1134
        break;
1135
    }
1136
    switch (cond) {
1137
    case 0x0:
1138
        gen_op_eval_bn(r_dst);
1139
        break;
1140
    case 0x1:
1141
        gen_op_eval_be(r_dst, r_src);
1142
        break;
1143
    case 0x2:
1144
        gen_op_eval_ble(r_dst, r_src);
1145
        break;
1146
    case 0x3:
1147
        gen_op_eval_bl(r_dst, r_src);
1148
        break;
1149
    case 0x4:
1150
        gen_op_eval_bleu(r_dst, r_src);
1151
        break;
1152
    case 0x5:
1153
        gen_op_eval_bcs(r_dst, r_src);
1154
        break;
1155
    case 0x6:
1156
        gen_op_eval_bneg(r_dst, r_src);
1157
        break;
1158
    case 0x7:
1159
        gen_op_eval_bvs(r_dst, r_src);
1160
        break;
1161
    case 0x8:
1162
        gen_op_eval_ba(r_dst);
1163
        break;
1164
    case 0x9:
1165
        gen_op_eval_bne(r_dst, r_src);
1166
        break;
1167
    case 0xa:
1168
        gen_op_eval_bg(r_dst, r_src);
1169
        break;
1170
    case 0xb:
1171
        gen_op_eval_bge(r_dst, r_src);
1172
        break;
1173
    case 0xc:
1174
        gen_op_eval_bgu(r_dst, r_src);
1175
        break;
1176
    case 0xd:
1177
        gen_op_eval_bcc(r_dst, r_src);
1178
        break;
1179
    case 0xe:
1180
        gen_op_eval_bpos(r_dst, r_src);
1181
        break;
1182
    case 0xf:
1183
        gen_op_eval_bvc(r_dst, r_src);
1184
        break;
1185
    }
1186
}
1187

    
1188
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1189
{
1190
    unsigned int offset;
1191

    
1192
    switch (cc) {
1193
    default:
1194
    case 0x0:
1195
        offset = 0;
1196
        break;
1197
    case 0x1:
1198
        offset = 32 - 10;
1199
        break;
1200
    case 0x2:
1201
        offset = 34 - 10;
1202
        break;
1203
    case 0x3:
1204
        offset = 36 - 10;
1205
        break;
1206
    }
1207

    
1208
    switch (cond) {
1209
    case 0x0:
1210
        gen_op_eval_bn(r_dst);
1211
        break;
1212
    case 0x1:
1213
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1214
        break;
1215
    case 0x2:
1216
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1217
        break;
1218
    case 0x3:
1219
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1220
        break;
1221
    case 0x4:
1222
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1223
        break;
1224
    case 0x5:
1225
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1226
        break;
1227
    case 0x6:
1228
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1229
        break;
1230
    case 0x7:
1231
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1232
        break;
1233
    case 0x8:
1234
        gen_op_eval_ba(r_dst);
1235
        break;
1236
    case 0x9:
1237
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1238
        break;
1239
    case 0xa:
1240
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1241
        break;
1242
    case 0xb:
1243
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1244
        break;
1245
    case 0xc:
1246
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1247
        break;
1248
    case 0xd:
1249
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1250
        break;
1251
    case 0xe:
1252
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1253
        break;
1254
    case 0xf:
1255
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1256
        break;
1257
    }
1258
}
1259

    
1260
#ifdef TARGET_SPARC64
1261
// Inverted logic
1262
static const int gen_tcg_cond_reg[8] = {
1263
    -1,
1264
    TCG_COND_NE,
1265
    TCG_COND_GT,
1266
    TCG_COND_GE,
1267
    -1,
1268
    TCG_COND_EQ,
1269
    TCG_COND_LE,
1270
    TCG_COND_LT,
1271
};
1272

    
1273
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1274
{
1275
    int l1;
1276

    
1277
    l1 = gen_new_label();
1278
    tcg_gen_movi_tl(r_dst, 0);
1279
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1280
    tcg_gen_movi_tl(r_dst, 1);
1281
    gen_set_label(l1);
1282
}
1283
#endif
1284

    
1285
/* XXX: potentially incorrect if dynamic npc */
1286
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1287
                      TCGv r_cond)
1288
{
1289
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1290
    target_ulong target = dc->pc + offset;
1291

    
1292
    if (cond == 0x0) {
1293
        /* unconditional not taken */
1294
        if (a) {
1295
            dc->pc = dc->npc + 4;
1296
            dc->npc = dc->pc + 4;
1297
        } else {
1298
            dc->pc = dc->npc;
1299
            dc->npc = dc->pc + 4;
1300
        }
1301
    } else if (cond == 0x8) {
1302
        /* unconditional taken */
1303
        if (a) {
1304
            dc->pc = target;
1305
            dc->npc = dc->pc + 4;
1306
        } else {
1307
            dc->pc = dc->npc;
1308
            dc->npc = target;
1309
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1310
        }
1311
    } else {
1312
        flush_cond(dc, r_cond);
1313
        gen_cond(r_cond, cc, cond, dc);
1314
        if (a) {
1315
            gen_branch_a(dc, target, dc->npc, r_cond);
1316
            dc->is_br = 1;
1317
        } else {
1318
            dc->pc = dc->npc;
1319
            dc->jump_pc[0] = target;
1320
            dc->jump_pc[1] = dc->npc + 4;
1321
            dc->npc = JUMP_PC;
1322
        }
1323
    }
1324
}
1325

    
1326
/* XXX: potentially incorrect if dynamic npc */
1327
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1328
                      TCGv r_cond)
1329
{
1330
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1331
    target_ulong target = dc->pc + offset;
1332

    
1333
    if (cond == 0x0) {
1334
        /* unconditional not taken */
1335
        if (a) {
1336
            dc->pc = dc->npc + 4;
1337
            dc->npc = dc->pc + 4;
1338
        } else {
1339
            dc->pc = dc->npc;
1340
            dc->npc = dc->pc + 4;
1341
        }
1342
    } else if (cond == 0x8) {
1343
        /* unconditional taken */
1344
        if (a) {
1345
            dc->pc = target;
1346
            dc->npc = dc->pc + 4;
1347
        } else {
1348
            dc->pc = dc->npc;
1349
            dc->npc = target;
1350
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1351
        }
1352
    } else {
1353
        flush_cond(dc, r_cond);
1354
        gen_fcond(r_cond, cc, cond);
1355
        if (a) {
1356
            gen_branch_a(dc, target, dc->npc, r_cond);
1357
            dc->is_br = 1;
1358
        } else {
1359
            dc->pc = dc->npc;
1360
            dc->jump_pc[0] = target;
1361
            dc->jump_pc[1] = dc->npc + 4;
1362
            dc->npc = JUMP_PC;
1363
        }
1364
    }
1365
}
1366

    
1367
#ifdef TARGET_SPARC64
1368
/* XXX: potentially incorrect if dynamic npc */
1369
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1370
                          TCGv r_cond, TCGv r_reg)
1371
{
1372
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1373
    target_ulong target = dc->pc + offset;
1374

    
1375
    flush_cond(dc, r_cond);
1376
    gen_cond_reg(r_cond, cond, r_reg);
1377
    if (a) {
1378
        gen_branch_a(dc, target, dc->npc, r_cond);
1379
        dc->is_br = 1;
1380
    } else {
1381
        dc->pc = dc->npc;
1382
        dc->jump_pc[0] = target;
1383
        dc->jump_pc[1] = dc->npc + 4;
1384
        dc->npc = JUMP_PC;
1385
    }
1386
}
1387

    
1388
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1389
{
1390
    switch (fccno) {
1391
    case 0:
1392
        gen_helper_fcmps(r_rs1, r_rs2);
1393
        break;
1394
    case 1:
1395
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1396
        break;
1397
    case 2:
1398
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1399
        break;
1400
    case 3:
1401
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1402
        break;
1403
    }
1404
}
1405

    
1406
static inline void gen_op_fcmpd(int fccno)
1407
{
1408
    switch (fccno) {
1409
    case 0:
1410
        gen_helper_fcmpd();
1411
        break;
1412
    case 1:
1413
        gen_helper_fcmpd_fcc1();
1414
        break;
1415
    case 2:
1416
        gen_helper_fcmpd_fcc2();
1417
        break;
1418
    case 3:
1419
        gen_helper_fcmpd_fcc3();
1420
        break;
1421
    }
1422
}
1423

    
1424
static inline void gen_op_fcmpq(int fccno)
1425
{
1426
    switch (fccno) {
1427
    case 0:
1428
        gen_helper_fcmpq();
1429
        break;
1430
    case 1:
1431
        gen_helper_fcmpq_fcc1();
1432
        break;
1433
    case 2:
1434
        gen_helper_fcmpq_fcc2();
1435
        break;
1436
    case 3:
1437
        gen_helper_fcmpq_fcc3();
1438
        break;
1439
    }
1440
}
1441

    
1442
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1443
{
1444
    switch (fccno) {
1445
    case 0:
1446
        gen_helper_fcmpes(r_rs1, r_rs2);
1447
        break;
1448
    case 1:
1449
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1450
        break;
1451
    case 2:
1452
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1453
        break;
1454
    case 3:
1455
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1456
        break;
1457
    }
1458
}
1459

    
1460
static inline void gen_op_fcmped(int fccno)
1461
{
1462
    switch (fccno) {
1463
    case 0:
1464
        gen_helper_fcmped();
1465
        break;
1466
    case 1:
1467
        gen_helper_fcmped_fcc1();
1468
        break;
1469
    case 2:
1470
        gen_helper_fcmped_fcc2();
1471
        break;
1472
    case 3:
1473
        gen_helper_fcmped_fcc3();
1474
        break;
1475
    }
1476
}
1477

    
1478
static inline void gen_op_fcmpeq(int fccno)
1479
{
1480
    switch (fccno) {
1481
    case 0:
1482
        gen_helper_fcmpeq();
1483
        break;
1484
    case 1:
1485
        gen_helper_fcmpeq_fcc1();
1486
        break;
1487
    case 2:
1488
        gen_helper_fcmpeq_fcc2();
1489
        break;
1490
    case 3:
1491
        gen_helper_fcmpeq_fcc3();
1492
        break;
1493
    }
1494
}
1495

    
1496
#else
1497

    
1498
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1499
{
1500
    gen_helper_fcmps(r_rs1, r_rs2);
1501
}
1502

    
1503
static inline void gen_op_fcmpd(int fccno)
1504
{
1505
    gen_helper_fcmpd();
1506
}
1507

    
1508
static inline void gen_op_fcmpq(int fccno)
1509
{
1510
    gen_helper_fcmpq();
1511
}
1512

    
1513
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1514
{
1515
    gen_helper_fcmpes(r_rs1, r_rs2);
1516
}
1517

    
1518
static inline void gen_op_fcmped(int fccno)
1519
{
1520
    gen_helper_fcmped();
1521
}
1522

    
1523
static inline void gen_op_fcmpeq(int fccno)
1524
{
1525
    gen_helper_fcmpeq();
1526
}
1527
#endif
1528

    
1529
static inline void gen_op_fpexception_im(int fsr_flags)
1530
{
1531
    TCGv_i32 r_const;
1532

    
1533
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1534
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1535
    r_const = tcg_const_i32(TT_FP_EXCP);
1536
    gen_helper_raise_exception(r_const);
1537
    tcg_temp_free_i32(r_const);
1538
}
1539

    
1540
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1541
{
1542
#if !defined(CONFIG_USER_ONLY)
1543
    if (!dc->fpu_enabled) {
1544
        TCGv_i32 r_const;
1545

    
1546
        save_state(dc, r_cond);
1547
        r_const = tcg_const_i32(TT_NFPU_INSN);
1548
        gen_helper_raise_exception(r_const);
1549
        tcg_temp_free_i32(r_const);
1550
        dc->is_br = 1;
1551
        return 1;
1552
    }
1553
#endif
1554
    return 0;
1555
}
1556

    
1557
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1558
{
1559
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1560
}
1561

    
1562
static inline void gen_clear_float_exceptions(void)
1563
{
1564
    gen_helper_clear_float_exceptions();
1565
}
1566

    
1567
/* asi moves */
1568
#ifdef TARGET_SPARC64
1569
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1570
{
1571
    int asi;
1572
    TCGv_i32 r_asi;
1573

    
1574
    if (IS_IMM) {
1575
        r_asi = tcg_temp_new_i32();
1576
        tcg_gen_mov_i32(r_asi, cpu_asi);
1577
    } else {
1578
        asi = GET_FIELD(insn, 19, 26);
1579
        r_asi = tcg_const_i32(asi);
1580
    }
1581
    return r_asi;
1582
}
1583

    
1584
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1585
                              int sign)
1586
{
1587
    TCGv_i32 r_asi, r_size, r_sign;
1588

    
1589
    r_asi = gen_get_asi(insn, addr);
1590
    r_size = tcg_const_i32(size);
1591
    r_sign = tcg_const_i32(sign);
1592
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1593
    tcg_temp_free_i32(r_sign);
1594
    tcg_temp_free_i32(r_size);
1595
    tcg_temp_free_i32(r_asi);
1596
}
1597

    
1598
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1599
{
1600
    TCGv_i32 r_asi, r_size;
1601

    
1602
    r_asi = gen_get_asi(insn, addr);
1603
    r_size = tcg_const_i32(size);
1604
    gen_helper_st_asi(addr, src, r_asi, r_size);
1605
    tcg_temp_free_i32(r_size);
1606
    tcg_temp_free_i32(r_asi);
1607
}
1608

    
1609
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1610
{
1611
    TCGv_i32 r_asi, r_size, r_rd;
1612

    
1613
    r_asi = gen_get_asi(insn, addr);
1614
    r_size = tcg_const_i32(size);
1615
    r_rd = tcg_const_i32(rd);
1616
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1617
    tcg_temp_free_i32(r_rd);
1618
    tcg_temp_free_i32(r_size);
1619
    tcg_temp_free_i32(r_asi);
1620
}
1621

    
1622
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1623
{
1624
    TCGv_i32 r_asi, r_size, r_rd;
1625

    
1626
    r_asi = gen_get_asi(insn, addr);
1627
    r_size = tcg_const_i32(size);
1628
    r_rd = tcg_const_i32(rd);
1629
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1630
    tcg_temp_free_i32(r_rd);
1631
    tcg_temp_free_i32(r_size);
1632
    tcg_temp_free_i32(r_asi);
1633
}
1634

    
1635
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1636
{
1637
    TCGv_i32 r_asi, r_size, r_sign;
1638

    
1639
    r_asi = gen_get_asi(insn, addr);
1640
    r_size = tcg_const_i32(4);
1641
    r_sign = tcg_const_i32(0);
1642
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1643
    tcg_temp_free_i32(r_sign);
1644
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1645
    tcg_temp_free_i32(r_size);
1646
    tcg_temp_free_i32(r_asi);
1647
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1648
}
1649

    
1650
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1651
{
1652
    TCGv_i32 r_asi, r_rd;
1653

    
1654
    r_asi = gen_get_asi(insn, addr);
1655
    r_rd = tcg_const_i32(rd);
1656
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1657
    tcg_temp_free_i32(r_rd);
1658
    tcg_temp_free_i32(r_asi);
1659
}
1660

    
1661
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1662
{
1663
    TCGv_i32 r_asi, r_size;
1664

    
1665
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1666
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1667
    r_asi = gen_get_asi(insn, addr);
1668
    r_size = tcg_const_i32(8);
1669
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1670
    tcg_temp_free_i32(r_size);
1671
    tcg_temp_free_i32(r_asi);
1672
}
1673

    
1674
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1675
                               int rd)
1676
{
1677
    TCGv r_val1;
1678
    TCGv_i32 r_asi;
1679

    
1680
    r_val1 = tcg_temp_new();
1681
    gen_movl_reg_TN(rd, r_val1);
1682
    r_asi = gen_get_asi(insn, addr);
1683
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1684
    tcg_temp_free_i32(r_asi);
1685
    tcg_temp_free(r_val1);
1686
}
1687

    
1688
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1689
                                int rd)
1690
{
1691
    TCGv_i32 r_asi;
1692

    
1693
    gen_movl_reg_TN(rd, cpu_tmp64);
1694
    r_asi = gen_get_asi(insn, addr);
1695
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1696
    tcg_temp_free_i32(r_asi);
1697
}
1698

    
1699
#elif !defined(CONFIG_USER_ONLY)
1700

    
1701
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1702
                              int sign)
1703
{
1704
    TCGv_i32 r_asi, r_size, r_sign;
1705

    
1706
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1707
    r_size = tcg_const_i32(size);
1708
    r_sign = tcg_const_i32(sign);
1709
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1710
    tcg_temp_free(r_sign);
1711
    tcg_temp_free(r_size);
1712
    tcg_temp_free(r_asi);
1713
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1714
}
1715

    
1716
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1717
{
1718
    TCGv_i32 r_asi, r_size;
1719

    
1720
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1721
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1722
    r_size = tcg_const_i32(size);
1723
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1724
    tcg_temp_free(r_size);
1725
    tcg_temp_free(r_asi);
1726
}
1727

    
1728
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1729
{
1730
    TCGv_i32 r_asi, r_size, r_sign;
1731
    TCGv_i64 r_val;
1732

    
1733
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1734
    r_size = tcg_const_i32(4);
1735
    r_sign = tcg_const_i32(0);
1736
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1737
    tcg_temp_free(r_sign);
1738
    r_val = tcg_temp_new_i64();
1739
    tcg_gen_extu_tl_i64(r_val, dst);
1740
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1741
    tcg_temp_free_i64(r_val);
1742
    tcg_temp_free(r_size);
1743
    tcg_temp_free(r_asi);
1744
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1745
}
1746

    
1747
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1748
{
1749
    TCGv_i32 r_asi, r_size, r_sign;
1750

    
1751
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1752
    r_size = tcg_const_i32(8);
1753
    r_sign = tcg_const_i32(0);
1754
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1755
    tcg_temp_free(r_sign);
1756
    tcg_temp_free(r_size);
1757
    tcg_temp_free(r_asi);
1758
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1759
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1760
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1761
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1762
    gen_movl_TN_reg(rd, hi);
1763
}
1764

    
1765
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1766
{
1767
    TCGv_i32 r_asi, r_size;
1768

    
1769
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1770
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1771
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1772
    r_size = tcg_const_i32(8);
1773
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1774
    tcg_temp_free(r_size);
1775
    tcg_temp_free(r_asi);
1776
}
1777
#endif
1778

    
1779
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1780
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1781
{
1782
    TCGv_i64 r_val;
1783
    TCGv_i32 r_asi, r_size;
1784

    
1785
    gen_ld_asi(dst, addr, insn, 1, 0);
1786

    
1787
    r_val = tcg_const_i64(0xffULL);
1788
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1789
    r_size = tcg_const_i32(1);
1790
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1791
    tcg_temp_free_i32(r_size);
1792
    tcg_temp_free_i32(r_asi);
1793
    tcg_temp_free_i64(r_val);
1794
}
1795
#endif
1796

    
1797
static inline TCGv get_src1(unsigned int insn, TCGv def)
1798
{
1799
    TCGv r_rs1 = def;
1800
    unsigned int rs1;
1801

    
1802
    rs1 = GET_FIELD(insn, 13, 17);
1803
    if (rs1 == 0) {
1804
        tcg_gen_movi_tl(def, 0);
1805
    } else if (rs1 < 8) {
1806
        r_rs1 = cpu_gregs[rs1];
1807
    } else {
1808
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1809
    }
1810
    return r_rs1;
1811
}
1812

    
1813
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}

#ifdef TARGET_SPARC64
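/* Point r_tsptr at the trap_state entry for the current trap level. */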
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif

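/* Bail out of the decoder when the CPU model lacks a required feature. */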
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;

/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
2913
                // clr/mov shortcut
2914

    
2915
                rs1 = GET_FIELD(insn, 13, 17);
2916
                if (rs1 == 0) {
2917
                    // or %g0, x, y -> mov T0, x; mov y, T0
2918
                    if (IS_IMM) {       /* immediate */
2919
                        TCGv r_const;
2920

    
2921
                        simm = GET_FIELDs(insn, 19, 31);
2922
                        r_const = tcg_const_tl(simm);
2923
                        gen_movl_TN_reg(rd, r_const);
2924
                        tcg_temp_free(r_const);
2925
                    } else {            /* register */
2926
                        rs2 = GET_FIELD(insn, 27, 31);
2927
                        gen_movl_reg_TN(rs2, cpu_dst);
2928
                        gen_movl_TN_reg(rd, cpu_dst);
2929
                    }
2930
                } else {
2931
                    cpu_src1 = get_src1(insn, cpu_src1);
2932
                    if (IS_IMM) {       /* immediate */
2933
                        simm = GET_FIELDs(insn, 19, 31);
2934
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2935
                        gen_movl_TN_reg(rd, cpu_dst);
2936
                    } else {            /* register */
2937
                        // or x, %g0, y -> mov T1, x; mov y, T1
2938
                        rs2 = GET_FIELD(insn, 27, 31);
2939
                        if (rs2 != 0) {
2940
                            gen_movl_reg_TN(rs2, cpu_src2);
2941
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2942
                            gen_movl_TN_reg(rd, cpu_dst);
2943
                        } else
2944
                            gen_movl_TN_reg(rd, cpu_src1);
2945
                    }
2946
                }
2947
#ifdef TARGET_SPARC64
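            /* Shift instructions: on V9, bit 12 of the instruction selects
               the 64-bit form (sllx/srlx/srax) with a 6-bit shift count;
               otherwise only the low 5 bits of the count are used. */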
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
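            /* Remaining integer ops (xop 0x00-0x35): for xop < 0x20, bit 4
               of xop (0x10) selects the condition-code setting variant, and
               dc->cc_op records which lazy flag computation is pending. */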
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr();
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
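                    /* wr %y / V9 wrasr: rd selects the target ancillary
                       state register; the value written is rs1 XOR
                       (rs2 or simm13), as specified for wr. */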
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
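                            /* Tick and system-tick compare/count: the timer
                               state is reached through a pointer loaded from
                               CPUState and updated via the tick helpers. */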
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
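                    /* wrwim / V9 wrpr: on SPARC64, rd selects the privileged
                       register (tpc, tnpc, tstate, tt, tick, tba, pstate,
                       window state registers, ...); on 32-bit targets the
                       value is masked to the implemented windows and written
                       to %wim. */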
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc, cpu_cond);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
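                    /* movcc: evaluate the selected integer or FP condition
                       into a temporary and branch over the register write
                       when the condition is false. */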
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                            break;
                        }
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                               cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
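            /* Implementation-dependent op space (V8 CPop1): on SPARC64 the
               opf field in bits 5-13 selects a VIS operation; anything not
               handled below raises an illegal instruction trap. */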
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
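                /* VIS logic ops: the single-precision forms operate on one
                   32-bit FP register, while the double forms apply the same
                   operation to both halves of the even/odd pair addressed
                   via DFPREG. */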
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
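                /* For these register-indirect control transfers the computed
                   target in cpu_dst becomes the new npc; check_align with
                   mask 3 raises a trap if either of the two low address bits
                   is set, and dc->npc is marked DYNAMIC_PC because the
                   destination is only known at run time. */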
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
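                /* For save and restore above, rs1/rs2 were already read and
                   summed into cpu_dst before the helper rotates the register
                   window, and the result is then written to rd in the newly
                   selected window, as the architecture requires. */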
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
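            /* Condition codes are evaluated lazily: cc_op records which
               operation last set them and cc_src/cc_src2/cc_dst hold its
               operands, so compute_psr only has to materialise the
               architectural flags when a memory access below might expose
               the cpu state. */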
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
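                /* In the ldd case above rd must be even: the single 64-bit
                   load is split so that the word at the lower address (the
                   high half of the big-endian value) ends up in rd via
                   cpu_val, and the word at addr + 4 in rd + 1. */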
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomic */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
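                /* ldstub is the classic test-and-set primitive: the old byte
                   is returned in rd while 0xff is written back to the same
                   location.  As the XXX notes, it is emitted here as two
                   separate memory operations rather than one atomic
                   access. */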
                case 0x0f:      /* swap, swap register with memory, atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomic */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory, atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
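                /* In the V9 form above, rd selects the width: rd == 1 is
                   ldxfsr and loads the full 64-bit FSR, any other rd keeps
                   the 32-bit ldfsr behaviour. */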
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
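                /* For std above, rd must again be even; concat_tl_i64 packs
                   rd + 1 into the low half and rd into the high half of the
                   64-bit value, so the big-endian store puts rd at the lower
                   address and rd + 1 at addr + 4. */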
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
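                /* For casa/casxa above, cpu_src2 still holds rs2, loaded
                   during the address phase at the top of this block; the
                   helpers use it as the comparison value and return the old
                   memory word in cpu_val, which is then written to rd. */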
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non-jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
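    /* pc/npc bookkeeping for the delay-slot architecture: DYNAMIC_PC means
       the next pc lives only in cpu_npc, JUMP_PC means it is one of the two
       jump_pc[] targets selected by the condition in cpu_cond, and otherwise
       both values are known statically and simply advance by 4. */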
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}

static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
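    /* Translation stops at the first branch, whenever pc no longer simply
       advances, near a page boundary, in single-step mode, or once the op
       buffer / icount budget is nearly exhausted, so a generated TB never
       crosses a page. */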

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
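    /* The values 1 and 2 tested above are DYNAMIC_PC and JUMP_PC from the
       top of this file: 1 means cpu_npc already holds the right value,
       2 means the delay-slot target depends on the recorded condition. */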

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}