/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;

// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
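/* GET_FIELD numbers bits in big-endian style, with bit 0 as the MSB of the
   32-bit word, so GET_FIELD(insn, 2, 6) extracts instruction bits 29..25
   (the rd field).  GET_FIELD_SP flips the arguments so callers can use the
   manual's numbering where bit 0 is 2^0, e.g. GET_FIELD_SP(insn, 25, 27)
   extracts bits 27..25.  The *s variants sign-extend the extracted field. */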

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
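/* In the SPARC64 encoding the 5-bit register field carries bit 5 of a
   double/quad FP register number in its least significant bit, so DFPREG
   and QFPREG move that bit up, e.g. DFPREG(1) == 32, i.e. %f32/%d32.
   On 32-bit SPARC the low bit(s) are simply masked off to enforce even
   (double) or multiple-of-four (quad) register alignment. */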

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
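/* dt0/dt1 and qt0/qt1 are staging slots in CPUSPARCState: double and quad
   operands are assembled there from consecutive 32-bit cpu_fpr registers
   before calling an FP helper, and results are copied back afterwards. */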

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
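/* Register access convention: %g0 reads as zero and writes to it are
   discarded, %g1-%g7 live in the fixed TCG globals cpu_gregs[], and the
   windowed registers (8..31, i.e. %o/%l/%i) are reached indirectly through
   cpu_regwptr, which points at the current window; e.g. reg 24 (%i0) is
   the target_ulong at offset 16 * sizeof(target_ulong) from cpu_regwptr. */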

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
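/* Direct TB chaining is only legal while pc and npc stay on the same guest
   page as this TB and we are not single-stepping; in that case
   tcg_gen_exit_tb((tcg_target_long)tb + tb_num) returns a tagged pointer
   that lets the execution loop patch the goto_tb jump later.  Otherwise we
   exit with 0 and force a fresh TB lookup. */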

// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
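/* Tagged-arithmetic trap helpers: gen_add_tv raises TT_TOVF on signed
   overflow, detected as ~(src1 ^ src2) & (src1 ^ dst) having bit 31 set
   (operands agreed in sign, result does not); gen_tag_tv raises the same
   trap when either operand carries a nonzero tag in its two low bits, as
   required by TADDccTV/TSUBccTV. */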

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
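/* These recover the icc carry without materializing the PSR: after an add,
   carry == ((uint32_t)cc_dst < (uint32_t)cc_src); after a subtract, the
   borrow is ((uint32_t)cc_src < (uint32_t)cc_src2). */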

static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
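/* ADDX/ADDC: dst = src1 + src2 + C.  The switch on dc->cc_op picks the
   cheapest way to obtain C for the current lazy condition-code state:
   known zero after logic/div ops, recomputed from the saved operands after
   add/sub, or via the compute_C_icc helper in the general case. */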

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
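/* MULScc: one step of the V8 shift-and-add multiply.  rs2 is added only if
   the old LSB of %y was 1, %y is shifted right with the dropped LSB of rs1
   inserted at bit 31, and the partial sum in rs1 is shifted right with
   (N ^ V) inserted at bit 31 before the final add whose result lands in
   cpu_cc_dst. */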
663

    
664
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
665
{
666
    TCGv_i32 r_src1, r_src2;
667
    TCGv_i64 r_temp, r_temp2;
668

    
669
    r_src1 = tcg_temp_new_i32();
670
    r_src2 = tcg_temp_new_i32();
671

    
672
    tcg_gen_trunc_tl_i32(r_src1, src1);
673
    tcg_gen_trunc_tl_i32(r_src2, src2);
674

    
675
    r_temp = tcg_temp_new_i64();
676
    r_temp2 = tcg_temp_new_i64();
677

    
678
    if (sign_ext) {
679
        tcg_gen_ext_i32_i64(r_temp, r_src2);
680
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
681
    } else {
682
        tcg_gen_extu_i32_i64(r_temp, r_src2);
683
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
684
    }
685

    
686
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
687

    
688
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
689
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
690
    tcg_temp_free_i64(r_temp);
691
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
692

    
693
    tcg_gen_trunc_i64_tl(dst, r_temp2);
694

    
695
    tcg_temp_free_i64(r_temp2);
696

    
697
    tcg_temp_free_i32(r_src1);
698
    tcg_temp_free_i32(r_src2);
699
}
700

    
701
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
702
{
703
    /* zero-extend truncated operands before multiplication */
704
    gen_op_multiply(dst, src1, src2, 0);
705
}
706

    
707
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
708
{
709
    /* sign-extend truncated operands before multiplication */
710
    gen_op_multiply(dst, src1, src2, 1);
711
}
712

    
713
#ifdef TARGET_SPARC64
714
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
715
{
716
    TCGv_i32 r_const;
717
    int l1;
718

    
719
    l1 = gen_new_label();
720
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
721
    r_const = tcg_const_i32(TT_DIV_ZERO);
722
    gen_helper_raise_exception(cpu_env, r_const);
723
    tcg_temp_free_i32(r_const);
724
    gen_set_label(l1);
725
}
726

    
727
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
728
{
729
    int l1, l2;
730
    TCGv r_temp1, r_temp2;
731

    
732
    l1 = gen_new_label();
733
    l2 = gen_new_label();
734
    r_temp1 = tcg_temp_local_new();
735
    r_temp2 = tcg_temp_local_new();
736
    tcg_gen_mov_tl(r_temp1, src1);
737
    tcg_gen_mov_tl(r_temp2, src2);
738
    gen_trap_ifdivzero_tl(r_temp2);
739
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
740
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
741
    tcg_gen_movi_i64(dst, INT64_MIN);
742
    tcg_gen_br(l2);
743
    gen_set_label(l1);
744
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
745
    gen_set_label(l2);
746
    tcg_temp_free(r_temp1);
747
    tcg_temp_free(r_temp2);
748
}
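/* SDIVX: trap on a zero divisor, and special-case INT64_MIN / -1, whose
   true quotient does not fit in 64 bits; the code returns INT64_MIN
   directly rather than performing a host division that could overflow. */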
749
#endif
750

    
751
// 1
752
static inline void gen_op_eval_ba(TCGv dst)
753
{
754
    tcg_gen_movi_tl(dst, 1);
755
}
756

    
757
// Z
758
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
759
{
760
    gen_mov_reg_Z(dst, src);
761
}
762

    
763
// Z | (N ^ V)
764
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
765
{
766
    gen_mov_reg_N(cpu_tmp0, src);
767
    gen_mov_reg_V(dst, src);
768
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
769
    gen_mov_reg_Z(cpu_tmp0, src);
770
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
771
}
772

    
773
// N ^ V
774
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
775
{
776
    gen_mov_reg_V(cpu_tmp0, src);
777
    gen_mov_reg_N(dst, src);
778
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
779
}
780

    
781
// C | Z
782
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
783
{
784
    gen_mov_reg_Z(cpu_tmp0, src);
785
    gen_mov_reg_C(dst, src);
786
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
787
}
788

    
789
// C
790
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
791
{
792
    gen_mov_reg_C(dst, src);
793
}
794

    
795
// V
796
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
797
{
798
    gen_mov_reg_V(dst, src);
799
}
800

    
801
// 0
802
static inline void gen_op_eval_bn(TCGv dst)
803
{
804
    tcg_gen_movi_tl(dst, 0);
805
}
806

    
807
// N
808
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
809
{
810
    gen_mov_reg_N(dst, src);
811
}
812

    
813
// !Z
814
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
815
{
816
    gen_mov_reg_Z(dst, src);
817
    tcg_gen_xori_tl(dst, dst, 0x1);
818
}
819

    
820
// !(Z | (N ^ V))
821
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
822
{
823
    gen_mov_reg_N(cpu_tmp0, src);
824
    gen_mov_reg_V(dst, src);
825
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
826
    gen_mov_reg_Z(cpu_tmp0, src);
827
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
828
    tcg_gen_xori_tl(dst, dst, 0x1);
829
}
830

    
831
// !(N ^ V)
832
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
833
{
834
    gen_mov_reg_V(cpu_tmp0, src);
835
    gen_mov_reg_N(dst, src);
836
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
837
    tcg_gen_xori_tl(dst, dst, 0x1);
838
}
839

    
840
// !(C | Z)
841
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
842
{
843
    gen_mov_reg_Z(cpu_tmp0, src);
844
    gen_mov_reg_C(dst, src);
845
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
846
    tcg_gen_xori_tl(dst, dst, 0x1);
847
}
848

    
849
// !C
850
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
851
{
852
    gen_mov_reg_C(dst, src);
853
    tcg_gen_xori_tl(dst, dst, 0x1);
854
}
855

    
856
// !N
857
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
858
{
859
    gen_mov_reg_N(dst, src);
860
    tcg_gen_xori_tl(dst, dst, 0x1);
861
}
862

    
863
// !V
864
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
865
{
866
    gen_mov_reg_V(dst, src);
867
    tcg_gen_xori_tl(dst, dst, 0x1);
868
}
869

    
870
/*
871
  FPSR bit field FCC1 | FCC0:
872
   0 =
873
   1 <
874
   2 >
875
   3 unordered
876
*/
877
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
878
                                    unsigned int fcc_offset)
879
{
880
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
881
    tcg_gen_andi_tl(reg, reg, 0x1);
882
}
883

    
884
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
885
                                    unsigned int fcc_offset)
886
{
887
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
888
    tcg_gen_andi_tl(reg, reg, 0x1);
889
}
890

    
891
// !0: FCC0 | FCC1
892
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
893
                                    unsigned int fcc_offset)
894
{
895
    gen_mov_reg_FCC0(dst, src, fcc_offset);
896
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
897
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
898
}
899

    
900
// 1 or 2: FCC0 ^ FCC1
901
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
902
                                    unsigned int fcc_offset)
903
{
904
    gen_mov_reg_FCC0(dst, src, fcc_offset);
905
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
906
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
907
}
908

    
909
// 1 or 3: FCC0
910
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
911
                                    unsigned int fcc_offset)
912
{
913
    gen_mov_reg_FCC0(dst, src, fcc_offset);
914
}
915

    
916
// 1: FCC0 & !FCC1
917
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
918
                                    unsigned int fcc_offset)
919
{
920
    gen_mov_reg_FCC0(dst, src, fcc_offset);
921
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
922
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
923
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
924
}
925

    
926
// 2 or 3: FCC1
927
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
928
                                    unsigned int fcc_offset)
929
{
930
    gen_mov_reg_FCC1(dst, src, fcc_offset);
931
}
932

    
933
// 2: !FCC0 & FCC1
934
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
935
                                    unsigned int fcc_offset)
936
{
937
    gen_mov_reg_FCC0(dst, src, fcc_offset);
938
    tcg_gen_xori_tl(dst, dst, 0x1);
939
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
940
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
941
}
942

    
943
// 3: FCC0 & FCC1
944
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
945
                                    unsigned int fcc_offset)
946
{
947
    gen_mov_reg_FCC0(dst, src, fcc_offset);
948
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
949
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
950
}
951

    
952
// 0: !(FCC0 | FCC1)
953
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
954
                                    unsigned int fcc_offset)
955
{
956
    gen_mov_reg_FCC0(dst, src, fcc_offset);
957
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
958
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
959
    tcg_gen_xori_tl(dst, dst, 0x1);
960
}
961

    
962
// 0 or 3: !(FCC0 ^ FCC1)
963
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
964
                                    unsigned int fcc_offset)
965
{
966
    gen_mov_reg_FCC0(dst, src, fcc_offset);
967
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
968
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
969
    tcg_gen_xori_tl(dst, dst, 0x1);
970
}
971

    
972
// 0 or 2: !FCC0
973
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
974
                                    unsigned int fcc_offset)
975
{
976
    gen_mov_reg_FCC0(dst, src, fcc_offset);
977
    tcg_gen_xori_tl(dst, dst, 0x1);
978
}
979

    
980
// !1: !(FCC0 & !FCC1)
981
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
982
                                    unsigned int fcc_offset)
983
{
984
    gen_mov_reg_FCC0(dst, src, fcc_offset);
985
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
986
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
987
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
988
    tcg_gen_xori_tl(dst, dst, 0x1);
989
}
990

    
991
// 0 or 1: !FCC1
992
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
993
                                    unsigned int fcc_offset)
994
{
995
    gen_mov_reg_FCC1(dst, src, fcc_offset);
996
    tcg_gen_xori_tl(dst, dst, 0x1);
997
}
998

    
999
// !2: !(!FCC0 & FCC1)
1000
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1001
                                    unsigned int fcc_offset)
1002
{
1003
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1004
    tcg_gen_xori_tl(dst, dst, 0x1);
1005
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1006
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1007
    tcg_gen_xori_tl(dst, dst, 0x1);
1008
}
1009

    
1010
// !3: !(FCC0 & FCC1)
1011
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1012
                                    unsigned int fcc_offset)
1013
{
1014
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1015
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1016
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017
    tcg_gen_xori_tl(dst, dst, 0x1);
1018
}
1019

    
1020
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1021
                               target_ulong pc2, TCGv r_cond)
1022
{
1023
    int l1;
1024

    
1025
    l1 = gen_new_label();
1026

    
1027
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1028

    
1029
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
1030

    
1031
    gen_set_label(l1);
1032
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
1033
}
1034

    
1035
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1036
                                target_ulong pc2, TCGv r_cond)
1037
{
1038
    int l1;
1039

    
1040
    l1 = gen_new_label();
1041

    
1042
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1043

    
1044
    gen_goto_tb(dc, 0, pc2, pc1);
1045

    
1046
    gen_set_label(l1);
1047
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1048
}
1049

    
1050
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1051
                                      TCGv r_cond)
1052
{
1053
    int l1, l2;
1054

    
1055
    l1 = gen_new_label();
1056
    l2 = gen_new_label();
1057

    
1058
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1059

    
1060
    tcg_gen_movi_tl(cpu_npc, npc1);
1061
    tcg_gen_br(l2);
1062

    
1063
    gen_set_label(l1);
1064
    tcg_gen_movi_tl(cpu_npc, npc2);
1065
    gen_set_label(l2);
1066
}
1067

    
1068
/* call this function before using the condition register as it may
1069
   have been set for a jump */
1070
static inline void flush_cond(DisasContext *dc, TCGv cond)
1071
{
1072
    if (dc->npc == JUMP_PC) {
1073
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1074
        dc->npc = DYNAMIC_PC;
1075
    }
1076
}
1077

    
1078
static inline void save_npc(DisasContext *dc, TCGv cond)
1079
{
1080
    if (dc->npc == JUMP_PC) {
1081
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1082
        dc->npc = DYNAMIC_PC;
1083
    } else if (dc->npc != DYNAMIC_PC) {
1084
        tcg_gen_movi_tl(cpu_npc, dc->npc);
1085
    }
1086
}
1087

    
1088
static inline void save_state(DisasContext *dc, TCGv cond)
1089
{
1090
    tcg_gen_movi_tl(cpu_pc, dc->pc);
1091
    /* flush pending conditional evaluations before exposing cpu state */
1092
    if (dc->cc_op != CC_OP_FLAGS) {
1093
        dc->cc_op = CC_OP_FLAGS;
1094
        gen_helper_compute_psr(cpu_env);
1095
    }
1096
    save_npc(dc, cond);
1097
}
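/* pc/npc bookkeeping: during translation dc->npc is either a concrete
   address, DYNAMIC_PC (run-time value already in cpu_npc), or JUMP_PC (the
   next PC depends on a branch condition still held in cond/jump_pc[]).
   flush_cond/save_npc resolve the JUMP_PC case via gen_generic_branch
   before CPU state is exposed, and save_state also flushes the lazily
   evaluated condition codes into the PSR. */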
1098

    
1099
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1100
{
1101
    if (dc->npc == JUMP_PC) {
1102
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1103
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1104
        dc->pc = DYNAMIC_PC;
1105
    } else if (dc->npc == DYNAMIC_PC) {
1106
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1107
        dc->pc = DYNAMIC_PC;
1108
    } else {
1109
        dc->pc = dc->npc;
1110
    }
1111
}
1112

    
1113
static inline void gen_op_next_insn(void)
1114
{
1115
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
1116
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1117
}
1118

    
1119
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1120
                            DisasContext *dc)
1121
{
1122
    TCGv_i32 r_src;
1123

    
1124
#ifdef TARGET_SPARC64
1125
    if (cc)
1126
        r_src = cpu_xcc;
1127
    else
1128
        r_src = cpu_psr;
1129
#else
1130
    r_src = cpu_psr;
1131
#endif
1132
    switch (dc->cc_op) {
1133
    case CC_OP_FLAGS:
1134
        break;
1135
    default:
1136
        gen_helper_compute_psr(cpu_env);
1137
        dc->cc_op = CC_OP_FLAGS;
1138
        break;
1139
    }
1140
    switch (cond) {
1141
    case 0x0:
1142
        gen_op_eval_bn(r_dst);
1143
        break;
1144
    case 0x1:
1145
        gen_op_eval_be(r_dst, r_src);
1146
        break;
1147
    case 0x2:
1148
        gen_op_eval_ble(r_dst, r_src);
1149
        break;
1150
    case 0x3:
1151
        gen_op_eval_bl(r_dst, r_src);
1152
        break;
1153
    case 0x4:
1154
        gen_op_eval_bleu(r_dst, r_src);
1155
        break;
1156
    case 0x5:
1157
        gen_op_eval_bcs(r_dst, r_src);
1158
        break;
1159
    case 0x6:
1160
        gen_op_eval_bneg(r_dst, r_src);
1161
        break;
1162
    case 0x7:
1163
        gen_op_eval_bvs(r_dst, r_src);
1164
        break;
1165
    case 0x8:
1166
        gen_op_eval_ba(r_dst);
1167
        break;
1168
    case 0x9:
1169
        gen_op_eval_bne(r_dst, r_src);
1170
        break;
1171
    case 0xa:
1172
        gen_op_eval_bg(r_dst, r_src);
1173
        break;
1174
    case 0xb:
1175
        gen_op_eval_bge(r_dst, r_src);
1176
        break;
1177
    case 0xc:
1178
        gen_op_eval_bgu(r_dst, r_src);
1179
        break;
1180
    case 0xd:
1181
        gen_op_eval_bcc(r_dst, r_src);
1182
        break;
1183
    case 0xe:
1184
        gen_op_eval_bpos(r_dst, r_src);
1185
        break;
1186
    case 0xf:
1187
        gen_op_eval_bvc(r_dst, r_src);
1188
        break;
1189
    }
1190
}
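/* gen_cond evaluates one of the 16 Bicc/BPcc condition encodings
   (0x0 "never" .. 0x8 "always") into r_dst as 0/1, using %xcc instead of
   %icc when cc != 0 on SPARC64.  Any pending lazy cc_op is folded into
   cpu_psr first so the PSR flag bits are valid. */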
1191

    
1192
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1193
{
1194
    unsigned int offset;
1195

    
1196
    switch (cc) {
1197
    default:
1198
    case 0x0:
1199
        offset = 0;
1200
        break;
1201
    case 0x1:
1202
        offset = 32 - 10;
1203
        break;
1204
    case 0x2:
1205
        offset = 34 - 10;
1206
        break;
1207
    case 0x3:
1208
        offset = 36 - 10;
1209
        break;
1210
    }
1211

    
1212
    switch (cond) {
1213
    case 0x0:
1214
        gen_op_eval_bn(r_dst);
1215
        break;
1216
    case 0x1:
1217
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1218
        break;
1219
    case 0x2:
1220
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1221
        break;
1222
    case 0x3:
1223
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1224
        break;
1225
    case 0x4:
1226
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1227
        break;
1228
    case 0x5:
1229
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1230
        break;
1231
    case 0x6:
1232
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1233
        break;
1234
    case 0x7:
1235
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1236
        break;
1237
    case 0x8:
1238
        gen_op_eval_ba(r_dst);
1239
        break;
1240
    case 0x9:
1241
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1242
        break;
1243
    case 0xa:
1244
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1245
        break;
1246
    case 0xb:
1247
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1248
        break;
1249
    case 0xc:
1250
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1251
        break;
1252
    case 0xd:
1253
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1254
        break;
1255
    case 0xe:
1256
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1257
        break;
1258
    case 0xf:
1259
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1260
        break;
1261
    }
1262
}
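/* gen_fcond is the FBfcc counterpart.  The offset selects the FSR
   condition field to inspect: 0 for fcc0 and 32-10, 34-10, 36-10 for the
   V9-only fcc1..fcc3 fields, expressed relative to FSR_FCC0_SHIFT so the
   gen_op_eval_fb* helpers can be shared. */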
1263

    
1264
#ifdef TARGET_SPARC64
1265
// Inverted logic
1266
static const int gen_tcg_cond_reg[8] = {
1267
    -1,
1268
    TCG_COND_NE,
1269
    TCG_COND_GT,
1270
    TCG_COND_GE,
1271
    -1,
1272
    TCG_COND_EQ,
1273
    TCG_COND_LE,
1274
    TCG_COND_LT,
1275
};
1276

    
1277
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1278
{
1279
    int l1;
1280

    
1281
    l1 = gen_new_label();
1282
    tcg_gen_movi_tl(r_dst, 0);
1283
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1284
    tcg_gen_movi_tl(r_dst, 1);
1285
    gen_set_label(l1);
1286
}
1287
#endif
1288

    
1289
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1290
                      TCGv r_cond)
1291
{
1292
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1293
    target_ulong target = dc->pc + offset;
1294

    
1295
    if (cond == 0x0) {
1296
        /* unconditional not taken */
1297
        if (a) {
1298
            dc->pc = dc->npc + 4;
1299
            dc->npc = dc->pc + 4;
1300
        } else {
1301
            dc->pc = dc->npc;
1302
            dc->npc = dc->pc + 4;
1303
        }
1304
    } else if (cond == 0x8) {
1305
        /* unconditional taken */
1306
        if (a) {
1307
            dc->pc = target;
1308
            dc->npc = dc->pc + 4;
1309
        } else {
1310
            dc->pc = dc->npc;
1311
            dc->npc = target;
1312
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1313
        }
1314
    } else {
1315
        flush_cond(dc, r_cond);
1316
        gen_cond(r_cond, cc, cond, dc);
1317
        if (a) {
1318
            gen_branch_a(dc, target, dc->npc, r_cond);
1319
            dc->is_br = 1;
1320
        } else {
1321
            dc->pc = dc->npc;
1322
            dc->jump_pc[0] = target;
1323
            if (unlikely(dc->npc == DYNAMIC_PC)) {
1324
                dc->jump_pc[1] = DYNAMIC_PC;
1325
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1326
            } else {
1327
                dc->jump_pc[1] = dc->npc + 4;
1328
                dc->npc = JUMP_PC;
1329
            }
1330
        }
1331
    }
1332
}
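/* Bicc/BPcc with the annul bit (insn bit 29): "ba,a" jumps straight to the
   target and discards the delay slot, an annulled branch-never skips the
   delay slot (pc = npc + 4), an annulled conditional branch is resolved
   immediately via gen_branch_a (the delay slot runs only if taken), and a
   non-annulled conditional branch records the two candidate next PCs in
   jump_pc[] (npc = JUMP_PC) to be resolved once the condition is known. */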
1333

    
1334
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1335
                      TCGv r_cond)
1336
{
1337
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1338
    target_ulong target = dc->pc + offset;
1339

    
1340
    if (cond == 0x0) {
1341
        /* unconditional not taken */
1342
        if (a) {
1343
            dc->pc = dc->npc + 4;
1344
            dc->npc = dc->pc + 4;
1345
        } else {
1346
            dc->pc = dc->npc;
1347
            dc->npc = dc->pc + 4;
1348
        }
1349
    } else if (cond == 0x8) {
1350
        /* unconditional taken */
1351
        if (a) {
1352
            dc->pc = target;
1353
            dc->npc = dc->pc + 4;
1354
        } else {
1355
            dc->pc = dc->npc;
1356
            dc->npc = target;
1357
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1358
        }
1359
    } else {
1360
        flush_cond(dc, r_cond);
1361
        gen_fcond(r_cond, cc, cond);
1362
        if (a) {
1363
            gen_branch_a(dc, target, dc->npc, r_cond);
1364
            dc->is_br = 1;
1365
        } else {
1366
            dc->pc = dc->npc;
1367
            dc->jump_pc[0] = target;
1368
            if (unlikely(dc->npc == DYNAMIC_PC)) {
1369
                dc->jump_pc[1] = DYNAMIC_PC;
1370
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1371
            } else {
1372
                dc->jump_pc[1] = dc->npc + 4;
1373
                dc->npc = JUMP_PC;
1374
            }
1375
        }
1376
    }
1377
}
1378

    
1379
#ifdef TARGET_SPARC64
1380
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1381
                          TCGv r_cond, TCGv r_reg)
1382
{
1383
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1384
    target_ulong target = dc->pc + offset;
1385

    
1386
    flush_cond(dc, r_cond);
1387
    gen_cond_reg(r_cond, cond, r_reg);
1388
    if (a) {
1389
        gen_branch_a(dc, target, dc->npc, r_cond);
1390
        dc->is_br = 1;
1391
    } else {
1392
        dc->pc = dc->npc;
1393
        dc->jump_pc[0] = target;
1394
        if (unlikely(dc->npc == DYNAMIC_PC)) {
1395
            dc->jump_pc[1] = DYNAMIC_PC;
1396
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1397
        } else {
1398
            dc->jump_pc[1] = dc->npc + 4;
1399
            dc->npc = JUMP_PC;
1400
        }
1401
    }
1402
}
1403

    
1404
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1405
{
1406
    switch (fccno) {
1407
    case 0:
1408
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1409
        break;
1410
    case 1:
1411
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1412
        break;
1413
    case 2:
1414
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1415
        break;
1416
    case 3:
1417
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1418
        break;
1419
    }
1420
}
1421

    
1422
static inline void gen_op_fcmpd(int fccno)
1423
{
1424
    switch (fccno) {
1425
    case 0:
1426
        gen_helper_fcmpd(cpu_env);
1427
        break;
1428
    case 1:
1429
        gen_helper_fcmpd_fcc1(cpu_env);
1430
        break;
1431
    case 2:
1432
        gen_helper_fcmpd_fcc2(cpu_env);
1433
        break;
1434
    case 3:
1435
        gen_helper_fcmpd_fcc3(cpu_env);
1436
        break;
1437
    }
1438
}
1439

    
1440
static inline void gen_op_fcmpq(int fccno)
1441
{
1442
    switch (fccno) {
1443
    case 0:
1444
        gen_helper_fcmpq(cpu_env);
1445
        break;
1446
    case 1:
1447
        gen_helper_fcmpq_fcc1(cpu_env);
1448
        break;
1449
    case 2:
1450
        gen_helper_fcmpq_fcc2(cpu_env);
1451
        break;
1452
    case 3:
1453
        gen_helper_fcmpq_fcc3(cpu_env);
1454
        break;
1455
    }
1456
}
1457

    
1458
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1459
{
1460
    switch (fccno) {
1461
    case 0:
1462
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1463
        break;
1464
    case 1:
1465
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1466
        break;
1467
    case 2:
1468
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1469
        break;
1470
    case 3:
1471
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1472
        break;
1473
    }
1474
}
1475

    
1476
static inline void gen_op_fcmped(int fccno)
1477
{
1478
    switch (fccno) {
1479
    case 0:
1480
        gen_helper_fcmped(cpu_env);
1481
        break;
1482
    case 1:
1483
        gen_helper_fcmped_fcc1(cpu_env);
1484
        break;
1485
    case 2:
1486
        gen_helper_fcmped_fcc2(cpu_env);
1487
        break;
1488
    case 3:
1489
        gen_helper_fcmped_fcc3(cpu_env);
1490
        break;
1491
    }
1492
}
1493

    
1494
static inline void gen_op_fcmpeq(int fccno)
1495
{
1496
    switch (fccno) {
1497
    case 0:
1498
        gen_helper_fcmpeq(cpu_env);
1499
        break;
1500
    case 1:
1501
        gen_helper_fcmpeq_fcc1(cpu_env);
1502
        break;
1503
    case 2:
1504
        gen_helper_fcmpeq_fcc2(cpu_env);
1505
        break;
1506
    case 3:
1507
        gen_helper_fcmpeq_fcc3(cpu_env);
1508
        break;
1509
    }
1510
}
1511

    
1512
#else
1513

    
1514
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1515
{
1516
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1517
}
1518

    
1519
static inline void gen_op_fcmpd(int fccno)
1520
{
1521
    gen_helper_fcmpd(cpu_env);
1522
}
1523

    
1524
static inline void gen_op_fcmpq(int fccno)
1525
{
1526
    gen_helper_fcmpq(cpu_env);
1527
}
1528

    
1529
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1530
{
1531
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1532
}
1533

    
1534
static inline void gen_op_fcmped(int fccno)
1535
{
1536
    gen_helper_fcmped(cpu_env);
1537
}
1538

    
1539
static inline void gen_op_fcmpeq(int fccno)
1540
{
1541
    gen_helper_fcmpeq(cpu_env);
1542
}
1543
#endif
1544

    
1545
static inline void gen_op_fpexception_im(int fsr_flags)
1546
{
1547
    TCGv_i32 r_const;
1548

    
1549
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1550
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1551
    r_const = tcg_const_i32(TT_FP_EXCP);
1552
    gen_helper_raise_exception(cpu_env, r_const);
1553
    tcg_temp_free_i32(r_const);
1554
}
1555

    
1556
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1557
{
1558
#if !defined(CONFIG_USER_ONLY)
1559
    if (!dc->fpu_enabled) {
1560
        TCGv_i32 r_const;
1561

    
1562
        save_state(dc, r_cond);
1563
        r_const = tcg_const_i32(TT_NFPU_INSN);
1564
        gen_helper_raise_exception(cpu_env, r_const);
1565
        tcg_temp_free_i32(r_const);
1566
        dc->is_br = 1;
1567
        return 1;
1568
    }
1569
#endif
1570
    return 0;
1571
}
1572

    
1573
static inline void gen_update_fprs_dirty(int rd)
1574
{
1575
#if defined(TARGET_SPARC64)
1576
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
1577
#endif
1578
}
1579

    
1580
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1581
{
1582
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1583
}
1584

    
1585
static inline void gen_clear_float_exceptions(void)
1586
{
1587
    gen_helper_clear_float_exceptions(cpu_env);
1588
}
1589

    
1590
/* asi moves */
1591
#ifdef TARGET_SPARC64
1592
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1593
{
1594
    int asi;
1595
    TCGv_i32 r_asi;
1596

    
1597
    if (IS_IMM) {
1598
        r_asi = tcg_temp_new_i32();
1599
        tcg_gen_mov_i32(r_asi, cpu_asi);
1600
    } else {
1601
        asi = GET_FIELD(insn, 19, 26);
1602
        r_asi = tcg_const_i32(asi);
1603
    }
1604
    return r_asi;
1605
}
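/* Alternate-space accesses: with the i bit set (IS_IMM) the ASI comes from
   the %asi register (cpu_asi), otherwise the 8-bit immediate ASI in
   instruction bits 12..5 is passed to the ld/st helpers as a constant. */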
1606

    
1607
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1608
                              int sign)
1609
{
1610
    TCGv_i32 r_asi, r_size, r_sign;
1611

    
1612
    r_asi = gen_get_asi(insn, addr);
1613
    r_size = tcg_const_i32(size);
1614
    r_sign = tcg_const_i32(sign);
1615
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1616
    tcg_temp_free_i32(r_sign);
1617
    tcg_temp_free_i32(r_size);
1618
    tcg_temp_free_i32(r_asi);
1619
}
1620

    
1621
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1622
{
1623
    TCGv_i32 r_asi, r_size;
1624

    
1625
    r_asi = gen_get_asi(insn, addr);
1626
    r_size = tcg_const_i32(size);
1627
    gen_helper_st_asi(addr, src, r_asi, r_size);
1628
    tcg_temp_free_i32(r_size);
1629
    tcg_temp_free_i32(r_asi);
1630
}
1631

    
1632
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1633
{
1634
    TCGv_i32 r_asi, r_size, r_rd;
1635

    
1636
    r_asi = gen_get_asi(insn, addr);
1637
    r_size = tcg_const_i32(size);
1638
    r_rd = tcg_const_i32(rd);
1639
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1640
    tcg_temp_free_i32(r_rd);
1641
    tcg_temp_free_i32(r_size);
1642
    tcg_temp_free_i32(r_asi);
1643
}
1644

    
1645
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1646
{
1647
    TCGv_i32 r_asi, r_size, r_rd;
1648

    
1649
    r_asi = gen_get_asi(insn, addr);
1650
    r_size = tcg_const_i32(size);
1651
    r_rd = tcg_const_i32(rd);
1652
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1653
    tcg_temp_free_i32(r_rd);
1654
    tcg_temp_free_i32(r_size);
1655
    tcg_temp_free_i32(r_asi);
1656
}
1657

    
1658
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1659
{
1660
    TCGv_i32 r_asi, r_size, r_sign;
1661

    
1662
    r_asi = gen_get_asi(insn, addr);
1663
    r_size = tcg_const_i32(4);
1664
    r_sign = tcg_const_i32(0);
1665
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1666
    tcg_temp_free_i32(r_sign);
1667
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1668
    tcg_temp_free_i32(r_size);
1669
    tcg_temp_free_i32(r_asi);
1670
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1671
}
1672

    
1673
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1674
{
1675
    TCGv_i32 r_asi, r_rd;
1676

    
1677
    r_asi = gen_get_asi(insn, addr);
1678
    r_rd = tcg_const_i32(rd);
1679
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1680
    tcg_temp_free_i32(r_rd);
1681
    tcg_temp_free_i32(r_asi);
1682
}
1683

    
1684
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1685
{
1686
    TCGv_i32 r_asi, r_size;
1687

    
1688
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1689
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1690
    r_asi = gen_get_asi(insn, addr);
1691
    r_size = tcg_const_i32(8);
1692
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1693
    tcg_temp_free_i32(r_size);
1694
    tcg_temp_free_i32(r_asi);
1695
}
1696

    
1697
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1698
                               int rd)
1699
{
1700
    TCGv r_val1;
1701
    TCGv_i32 r_asi;
1702

    
1703
    r_val1 = tcg_temp_new();
1704
    gen_movl_reg_TN(rd, r_val1);
1705
    r_asi = gen_get_asi(insn, addr);
1706
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1707
    tcg_temp_free_i32(r_asi);
1708
    tcg_temp_free(r_val1);
1709
}
1710

    
1711
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1712
                                int rd)
1713
{
1714
    TCGv_i32 r_asi;
1715

    
1716
    gen_movl_reg_TN(rd, cpu_tmp64);
1717
    r_asi = gen_get_asi(insn, addr);
1718
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1719
    tcg_temp_free_i32(r_asi);
1720
}
1721

    
1722
#elif !defined(CONFIG_USER_ONLY)
1723

    
1724
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1725
                              int sign)
1726
{
1727
    TCGv_i32 r_asi, r_size, r_sign;
1728

    
1729
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1730
    r_size = tcg_const_i32(size);
1731
    r_sign = tcg_const_i32(sign);
1732
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1733
    tcg_temp_free(r_sign);
1734
    tcg_temp_free(r_size);
1735
    tcg_temp_free(r_asi);
1736
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1737
}
1738

    
1739
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1740
{
1741
    TCGv_i32 r_asi, r_size;
1742

    
1743
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1744
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1745
    r_size = tcg_const_i32(size);
1746
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1747
    tcg_temp_free(r_size);
1748
    tcg_temp_free(r_asi);
1749
}
1750

    
1751
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1752
{
1753
    TCGv_i32 r_asi, r_size, r_sign;
1754
    TCGv_i64 r_val;
1755

    
1756
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1757
    r_size = tcg_const_i32(4);
1758
    r_sign = tcg_const_i32(0);
1759
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1760
    tcg_temp_free(r_sign);
1761
    r_val = tcg_temp_new_i64();
1762
    tcg_gen_extu_tl_i64(r_val, dst);
1763
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1764
    tcg_temp_free_i64(r_val);
1765
    tcg_temp_free(r_size);
1766
    tcg_temp_free(r_asi);
1767
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1768
}
1769

    
1770
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1771
{
1772
    TCGv_i32 r_asi, r_size, r_sign;
1773

    
1774
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1775
    r_size = tcg_const_i32(8);
1776
    r_sign = tcg_const_i32(0);
1777
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1778
    tcg_temp_free(r_sign);
1779
    tcg_temp_free(r_size);
1780
    tcg_temp_free(r_asi);
1781
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1782
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1783
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1784
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1785
    gen_movl_TN_reg(rd, hi);
1786
}
1787

    
1788
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
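/* LDSTUBA: atomic load-store unsigned byte in an alternate space;
   the byte at [addr] is loaded into dst, then 0xff is written back to
   the same location. */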
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif

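/* Operand fetch for format 3 instructions: %g0 reads as constant zero,
   the other globals alias the cpu_gregs TCG globals directly, and
   windowed registers are loaded from the current window via cpu_regwptr
   into the caller-supplied temporary.  get_src2 additionally handles
   the 13-bit sign-extended immediate form. */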
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}

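/* Helper for the V9 privileged TPC/TNPC/TSTATE/TT accessors below:
   compute a host pointer to the trap_state entry selected by the
   current trap level, i.e. tsptr = &env->ts[env->tl & MAXTL_MASK]. */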
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif

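/* Per-CPU feature gates used throughout the decoder: a missing
   integer-unit feature branches to the illegal_insn handler, a missing
   FPU feature to the nfpu_insn handler. */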
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;

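/* Instruction decode: op (bits 31:30) selects between branches/SETHI,
   CALL and the remaining format 3 groups; rd is bits 29:25.  Note that
   GET_FIELD uses big-endian bit numbering, so field (0, 1) is the top
   two bits of the instruction word. */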
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
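    /* op = 0: branches and SETHI.  xop (bits 24:22) selects the branch
       class; conditional displacements are sign-extended and shifted
       left by two before being added to the PC. */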
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
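    /* op = 1: CALL.  The 30-bit word displacement is shifted left by
       two and added to the PC; the address of the CALL itself is
       written to register 15 (%o7). */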
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
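    /* op = 2: arithmetic, control-register and FPU operations,
       dispatched on xop (bits 24:19). */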
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

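                /* Tcc: the software trap number is rs1 plus rs2 (or the
                   immediate), masked with V8_TRAP_MASK (or
                   UA2005_HTRAP_MASK for a hypervisor-capable CPU in
                   supervisor mode) and offset by TT_TRAP.  cond 0x8 is
                   "trap always"; other conditions are tested at run
                   time. */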
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_env, cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_env, cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr(cpu_env);
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr(cpu_env);
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst, cpu_env);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw(cpu_env);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
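                /* FPop1: single-precision results are computed directly
                   in the cpu_fpr[] TCG globals; double and quad operands
                   go through the DT0/DT1 and QT0/QT1 slots used by the
                   gen_op_load/store_fpr_* helpers around each operation. */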
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_env, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_env, cpu_fpr[rs1],
                                     cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_env, cpu_fpr[rs1],
                                     cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_env, cpu_fpr[rs1],
                                     cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_env, cpu_fpr[rs1],
                                     cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_env, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_env, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_env, cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_env, cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_env, cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_env, cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_env, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_env, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32, cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq(cpu_env);
                    gen_helper_check_ieee_exceptions(cpu_env);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_update_fprs_dirty(rd);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_update_fprs_dirty(DFPREG(rd));
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_update_fprs_dirty(QFPREG(rd));
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
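/* FMOVcc on a floating-point condition code: copy the source register
   (or register pair/quad) to rd only when the selected %fccN condition
   holds, otherwise branch over the move and leave rd unchanged. */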
#define FMOVSCC(fcc)                                                    \
2779
                    {                                                   \
2780
                        TCGv r_cond;                                    \
2781
                        int l1;                                         \
2782
                                                                        \
2783
                        l1 = gen_new_label();                           \
2784
                        r_cond = tcg_temp_new();                        \
2785
                        cond = GET_FIELD_SP(insn, 14, 17);              \
2786
                        gen_fcond(r_cond, fcc, cond);                   \
2787
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2788
                                           0, l1);                      \
2789
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2790
                        gen_update_fprs_dirty(rd);                      \
2791
                        gen_set_label(l1);                              \
2792
                        tcg_temp_free(r_cond);                          \
2793
                    }
2794
#define FMOVDCC(fcc)                                                    \
2795
                    {                                                   \
2796
                        TCGv r_cond;                                    \
2797
                        int l1;                                         \
2798
                                                                        \
2799
                        l1 = gen_new_label();                           \
2800
                        r_cond = tcg_temp_new();                        \
2801
                        cond = GET_FIELD_SP(insn, 14, 17);              \
2802
                        gen_fcond(r_cond, fcc, cond);                   \
2803
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2804
                                           0, l1);                      \
2805
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2806
                                        cpu_fpr[DFPREG(rs2)]);          \
2807
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2808
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
2809
                        gen_update_fprs_dirty(DFPREG(rd));              \
2810
                        gen_set_label(l1);                              \
2811
                        tcg_temp_free(r_cond);                          \
2812
                    }
2813
#define FMOVQCC(fcc)                                                    \
2814
                    {                                                   \
2815
                        TCGv r_cond;                                    \
2816
                        int l1;                                         \
2817
                                                                        \
2818
                        l1 = gen_new_label();                           \
2819
                        r_cond = tcg_temp_new();                        \
2820
                        cond = GET_FIELD_SP(insn, 14, 17);              \
2821
                        gen_fcond(r_cond, fcc, cond);                   \
2822
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2823
                                           0, l1);                      \
2824
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2825
                                        cpu_fpr[QFPREG(rs2)]);          \
2826
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2827
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
2828
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2829
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
2830
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2831
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
2832
                        gen_update_fprs_dirty(QFPREG(rd));              \
2833
                        gen_set_label(l1);                              \
2834
                        tcg_temp_free(r_cond);                          \
2835
                    }
2836
                    case 0x001: /* V9 fmovscc %fcc0 */
2837
                        FMOVSCC(0);
2838
                        break;
2839
                    case 0x002: /* V9 fmovdcc %fcc0 */
2840
                        FMOVDCC(0);
2841
                        break;
2842
                    case 0x003: /* V9 fmovqcc %fcc0 */
2843
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2844
                        FMOVQCC(0);
2845
                        break;
2846
                    case 0x041: /* V9 fmovscc %fcc1 */
2847
                        FMOVSCC(1);
2848
                        break;
2849
                    case 0x042: /* V9 fmovdcc %fcc1 */
2850
                        FMOVDCC(1);
2851
                        break;
2852
                    case 0x043: /* V9 fmovqcc %fcc1 */
2853
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2854
                        FMOVQCC(1);
2855
                        break;
2856
                    case 0x081: /* V9 fmovscc %fcc2 */
2857
                        FMOVSCC(2);
2858
                        break;
2859
                    case 0x082: /* V9 fmovdcc %fcc2 */
2860
                        FMOVDCC(2);
2861
                        break;
2862
                    case 0x083: /* V9 fmovqcc %fcc2 */
2863
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2864
                        FMOVQCC(2);
2865
                        break;
2866
                    case 0x0c1: /* V9 fmovscc %fcc3 */
2867
                        FMOVSCC(3);
2868
                        break;
2869
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
2870
                        FMOVDCC(3);
2871
                        break;
2872
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
2873
                        CHECK_FPU_FEATURE(dc, FLOAT128);
2874
                        FMOVQCC(3);
2875
                        break;
2876
#undef FMOVSCC
2877
#undef FMOVDCC
2878
#undef FMOVQCC
2879
#define FMOVSCC(icc)                                                    \
2880
                    {                                                   \
2881
                        TCGv r_cond;                                    \
2882
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_update_fprs_dirty(rd);                      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_update_fprs_dirty(DFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_update_fprs_dirty(QFPREG(rd));              \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

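                    /* The FMOV[SDQ]CC macros above expand to a conditional
                       FP register move: the condition is evaluated into a
                       temporary and, when it is zero, a branch skips both
                       the register copy and the FPRS dirty update. */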
                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
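            /* For the V9 shift forms below, bit 12 of the instruction
               selects the 64-bit variants (sllx/srlx/srax) with a 6-bit
               shift count; the 32-bit variants mask the count to 5 bits
               and, for srl/sra, first truncate or sign-extend the source
               to 32 bits. */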
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        {
                            TCGv r_temp1, r_temp2;
                            r_temp1 = tcg_temp_local_new();
                            r_temp2 = tcg_temp_local_new();
                            tcg_gen_mov_tl(r_temp1, cpu_src1);
                            tcg_gen_mov_tl(r_temp2, cpu_src2);
                            gen_trap_ifdivzero_tl(r_temp2);
                            tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
                            tcg_temp_free(r_temp1);
                            tcg_temp_free(r_temp2);
                        }
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr(cpu_env);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
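                    /* Note: the SPARC32 wrpsr path below ends the current
                       TB after the write, since the new PSR value can
                       affect processor state that translation depends on. */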
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    gen_helper_wrpstate(cpu_env, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                              cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
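                /* Many of the 64-bit VIS ops below stage their operands
                   through the DT0/DT1 temporaries in the CPU state: both
                   double-word sources are loaded, the helper operates on
                   DT0/DT1, and DT0 is stored back to the destination
                   before the FP register set is marked dirty. */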
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd], cpu_env,
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32(cpu_env);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd], cpu_env,
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_update_fprs_dirty(rd);
                    break;
4042
                case 0x054: /* VIS I fpsub16 */
4043
                    CHECK_FPU_FEATURE(dc, VIS1);
4044
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4045
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4046
                    gen_helper_fpsub16(cpu_env);
4047
                    gen_op_store_DT0_fpr(DFPREG(rd));
4048
                    gen_update_fprs_dirty(DFPREG(rd));
4049
                    break;
4050
                case 0x055: /* VIS I fpsub16s */
4051
                    CHECK_FPU_FEATURE(dc, VIS1);
4052
                    gen_helper_fpsub16s(cpu_fpr[rd], cpu_env,
4053
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
4054
                    gen_update_fprs_dirty(rd);
4055
                    break;
4056
                case 0x056: /* VIS I fpsub32 */
4057
                    CHECK_FPU_FEATURE(dc, VIS1);
4058
                    gen_op_load_fpr_DT0(DFPREG(rs1));
4059
                    gen_op_load_fpr_DT1(DFPREG(rs2));
4060
                    gen_helper_fpsub32(cpu_env);
4061
                    gen_op_store_DT0_fpr(DFPREG(rd));
4062
                    gen_update_fprs_dirty(DFPREG(rd));
4063
                    break;
4064
                case 0x057: /* VIS I fpsub32s */
4065
                    CHECK_FPU_FEATURE(dc, VIS1);
4066
                    gen_helper_fpsub32s(cpu_fpr[rd], cpu_env,
4067
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
4068
                    gen_update_fprs_dirty(rd);
4069
                    break;
4070
                case 0x060: /* VIS I fzero */
4071
                    CHECK_FPU_FEATURE(dc, VIS1);
4072
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4073
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4074
                    gen_update_fprs_dirty(DFPREG(rd));
4075
                    break;
4076
                case 0x061: /* VIS I fzeros */
4077
                    CHECK_FPU_FEATURE(dc, VIS1);
4078
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
4079
                    gen_update_fprs_dirty(rd);
4080
                    break;
4081
                case 0x062: /* VIS I fnor */
4082
                    CHECK_FPU_FEATURE(dc, VIS1);
4083
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4084
                                    cpu_fpr[DFPREG(rs2)]);
4085
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
4086
                                    cpu_fpr[DFPREG(rs1) + 1],
4087
                                    cpu_fpr[DFPREG(rs2) + 1]);
4088
                    gen_update_fprs_dirty(DFPREG(rd));
4089
                    break;
4090
                case 0x063: /* VIS I fnors */
4091
                    CHECK_FPU_FEATURE(dc, VIS1);
4092
                    tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4093
                    gen_update_fprs_dirty(rd);
4094
                    break;
4095
                case 0x064: /* VIS I fandnot2 */
4096
                    CHECK_FPU_FEATURE(dc, VIS1);
4097
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4098
                                     cpu_fpr[DFPREG(rs2)]);
4099
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4100
                                     cpu_fpr[DFPREG(rs1) + 1],
4101
                                     cpu_fpr[DFPREG(rs2) + 1]);
4102
                    gen_update_fprs_dirty(DFPREG(rd));
4103
                    break;
4104
                case 0x065: /* VIS I fandnot2s */
4105
                    CHECK_FPU_FEATURE(dc, VIS1);
4106
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4107
                    gen_update_fprs_dirty(rd);
4108
                    break;
4109
                case 0x066: /* VIS I fnot2 */
4110
                    CHECK_FPU_FEATURE(dc, VIS1);
4111
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4112
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4113
                                    cpu_fpr[DFPREG(rs2) + 1]);
4114
                    gen_update_fprs_dirty(DFPREG(rd));
4115
                    break;
4116
                case 0x067: /* VIS I fnot2s */
4117
                    CHECK_FPU_FEATURE(dc, VIS1);
4118
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4119
                    gen_update_fprs_dirty(rd);
4120
                    break;
4121
                case 0x068: /* VIS I fandnot1 */
4122
                    CHECK_FPU_FEATURE(dc, VIS1);
4123
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4124
                                     cpu_fpr[DFPREG(rs1)]);
4125
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4126
                                     cpu_fpr[DFPREG(rs2) + 1],
4127
                                     cpu_fpr[DFPREG(rs1) + 1]);
4128
                    gen_update_fprs_dirty(DFPREG(rd));
4129
                    break;
4130
                case 0x069: /* VIS I fandnot1s */
4131
                    CHECK_FPU_FEATURE(dc, VIS1);
4132
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4133
                    gen_update_fprs_dirty(rd);
4134
                    break;
4135
                case 0x06a: /* VIS I fnot1 */
4136
                    CHECK_FPU_FEATURE(dc, VIS1);
4137
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4138
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4139
                                    cpu_fpr[DFPREG(rs1) + 1]);
4140
                    gen_update_fprs_dirty(DFPREG(rd));
4141
                    break;
4142
                case 0x06b: /* VIS I fnot1s */
4143
                    CHECK_FPU_FEATURE(dc, VIS1);
4144
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4145
                    gen_update_fprs_dirty(rd);
4146
                    break;
4147
                case 0x06c: /* VIS I fxor */
4148
                    CHECK_FPU_FEATURE(dc, VIS1);
4149
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4150
                                    cpu_fpr[DFPREG(rs2)]);
4151
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4152
                                    cpu_fpr[DFPREG(rs1) + 1],
4153
                                    cpu_fpr[DFPREG(rs2) + 1]);
4154
                    gen_update_fprs_dirty(DFPREG(rd));
4155
                    break;
4156
                case 0x06d: /* VIS I fxors */
4157
                    CHECK_FPU_FEATURE(dc, VIS1);
4158
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4159
                    gen_update_fprs_dirty(rd);
4160
                    break;
4161
                case 0x06e: /* VIS I fnand */
4162
                    CHECK_FPU_FEATURE(dc, VIS1);
4163
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4164
                                     cpu_fpr[DFPREG(rs2)]);
4165
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
4166
                                     cpu_fpr[DFPREG(rs1) + 1],
4167
                                     cpu_fpr[DFPREG(rs2) + 1]);
4168
                    gen_update_fprs_dirty(DFPREG(rd));
4169
                    break;
4170
                case 0x06f: /* VIS I fnands */
4171
                    CHECK_FPU_FEATURE(dc, VIS1);
4172
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4173
                    gen_update_fprs_dirty(rd);
4174
                    break;
4175
                case 0x070: /* VIS I fand */
4176
                    CHECK_FPU_FEATURE(dc, VIS1);
4177
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4178
                                    cpu_fpr[DFPREG(rs2)]);
4179
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4180
                                    cpu_fpr[DFPREG(rs1) + 1],
4181
                                    cpu_fpr[DFPREG(rs2) + 1]);
4182
                    gen_update_fprs_dirty(DFPREG(rd));
4183
                    break;
4184
                case 0x071: /* VIS I fands */
4185
                    CHECK_FPU_FEATURE(dc, VIS1);
4186
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4187
                    gen_update_fprs_dirty(rd);
4188
                    break;
4189
                case 0x072: /* VIS I fxnor */
4190
                    CHECK_FPU_FEATURE(dc, VIS1);
4191
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4192
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4193
                                    cpu_fpr[DFPREG(rs1)]);
4194
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4195
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4196
                                    cpu_fpr[DFPREG(rs1) + 1]);
4197
                    gen_update_fprs_dirty(DFPREG(rd));
4198
                    break;
4199
                case 0x073: /* VIS I fxnors */
4200
                    CHECK_FPU_FEATURE(dc, VIS1);
4201
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4202
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4203
                    gen_update_fprs_dirty(rd);
4204
                    break;
4205
                case 0x074: /* VIS I fsrc1 */
4206
                    CHECK_FPU_FEATURE(dc, VIS1);
4207
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4208
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4209
                                    cpu_fpr[DFPREG(rs1) + 1]);
4210
                    gen_update_fprs_dirty(DFPREG(rd));
4211
                    break;
4212
                case 0x075: /* VIS I fsrc1s */
4213
                    CHECK_FPU_FEATURE(dc, VIS1);
4214
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4215
                    gen_update_fprs_dirty(rd);
4216
                    break;
4217
                case 0x076: /* VIS I fornot2 */
4218
                    CHECK_FPU_FEATURE(dc, VIS1);
4219
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4220
                                    cpu_fpr[DFPREG(rs2)]);
4221
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4222
                                    cpu_fpr[DFPREG(rs1) + 1],
4223
                                    cpu_fpr[DFPREG(rs2) + 1]);
4224
                    gen_update_fprs_dirty(DFPREG(rd));
4225
                    break;
4226
                case 0x077: /* VIS I fornot2s */
4227
                    CHECK_FPU_FEATURE(dc, VIS1);
4228
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4229
                    gen_update_fprs_dirty(rd);
4230
                    break;
4231
                case 0x078: /* VIS I fsrc2 */
4232
                    CHECK_FPU_FEATURE(dc, VIS1);
4233
                    gen_op_load_fpr_DT0(DFPREG(rs2));
4234
                    gen_op_store_DT0_fpr(DFPREG(rd));
4235
                    gen_update_fprs_dirty(DFPREG(rd));
4236
                    break;
4237
                case 0x079: /* VIS I fsrc2s */
4238
                    CHECK_FPU_FEATURE(dc, VIS1);
4239
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4240
                    gen_update_fprs_dirty(rd);
4241
                    break;
4242
                case 0x07a: /* VIS I fornot1 */
4243
                    CHECK_FPU_FEATURE(dc, VIS1);
4244
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4245
                                    cpu_fpr[DFPREG(rs1)]);
4246
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4247
                                    cpu_fpr[DFPREG(rs2) + 1],
4248
                                    cpu_fpr[DFPREG(rs1) + 1]);
4249
                    gen_update_fprs_dirty(DFPREG(rd));
4250
                    break;
4251
                case 0x07b: /* VIS I fornot1s */
4252
                    CHECK_FPU_FEATURE(dc, VIS1);
4253
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4254
                    gen_update_fprs_dirty(rd);
4255
                    break;
4256
                case 0x07c: /* VIS I for */
4257
                    CHECK_FPU_FEATURE(dc, VIS1);
4258
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4259
                                   cpu_fpr[DFPREG(rs2)]);
4260
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4261
                                   cpu_fpr[DFPREG(rs1) + 1],
4262
                                   cpu_fpr[DFPREG(rs2) + 1]);
4263
                    gen_update_fprs_dirty(DFPREG(rd));
4264
                    break;
4265
                case 0x07d: /* VIS I fors */
4266
                    CHECK_FPU_FEATURE(dc, VIS1);
4267
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4268
                    gen_update_fprs_dirty(rd);
4269
                    break;
4270
                case 0x07e: /* VIS I fone */
4271
                    CHECK_FPU_FEATURE(dc, VIS1);
4272
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4273
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4274
                    gen_update_fprs_dirty(DFPREG(rd));
4275
                    break;
4276
                case 0x07f: /* VIS I fones */
4277
                    CHECK_FPU_FEATURE(dc, VIS1);
4278
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
4279
                    gen_update_fprs_dirty(rd);
4280
                    break;
4281
                case 0x080: /* VIS I shutdown */
4282
                case 0x081: /* VIS II siam */
4283
                    // XXX
4284
                    goto illegal_insn;
4285
                default:
4286
                    goto illegal_insn;
4287
                }
4288
#else
4289
                goto ncp_insn;
4290
#endif
4291
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4292
#ifdef TARGET_SPARC64
4293
                goto illegal_insn;
4294
#else
4295
                goto ncp_insn;
4296
#endif
4297
#ifdef TARGET_SPARC64
4298
            } else if (xop == 0x39) { /* V9 return */
4299
                TCGv_i32 r_const;
4300

    
4301
                save_state(dc, cpu_cond);
4302
                cpu_src1 = get_src1(insn, cpu_src1);
4303
                if (IS_IMM) {   /* immediate */
4304
                    simm = GET_FIELDs(insn, 19, 31);
4305
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4306
                } else {                /* register */
4307
                    rs2 = GET_FIELD(insn, 27, 31);
4308
                    if (rs2) {
4309
                        gen_movl_reg_TN(rs2, cpu_src2);
4310
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4311
                    } else
4312
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4313
                }
4314
                gen_helper_restore(cpu_env);
4315
                gen_mov_pc_npc(dc, cpu_cond);
4316
                r_const = tcg_const_i32(3);
4317
                gen_helper_check_align(cpu_dst, r_const);
4318
                tcg_temp_free_i32(r_const);
4319
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
4320
                dc->npc = DYNAMIC_PC;
4321
                goto jmp_insn;
4322
#endif
4323
            } else {
4324
                cpu_src1 = get_src1(insn, cpu_src1);
4325
                if (IS_IMM) {   /* immediate */
4326
                    simm = GET_FIELDs(insn, 19, 31);
4327
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4328
                } else {                /* register */
4329
                    rs2 = GET_FIELD(insn, 27, 31);
4330
                    if (rs2) {
4331
                        gen_movl_reg_TN(rs2, cpu_src2);
4332
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4333
                    } else
4334
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4335
                }
4336
                switch (xop) {
4337
                case 0x38:      /* jmpl */
4338
                    {
4339
                        TCGv r_pc;
4340
                        TCGv_i32 r_const;
4341

    
4342
                        r_pc = tcg_const_tl(dc->pc);
4343
                        gen_movl_TN_reg(rd, r_pc);
4344
                        tcg_temp_free(r_pc);
4345
                        gen_mov_pc_npc(dc, cpu_cond);
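                        /* the computed target must be word aligned:
                           check_align with mask 3 traps if either of the
                           two low address bits is set */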
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr(cpu_env);
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
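                        /* big-endian ldd: the even register rd gets the word
                           at the lower address (upper half of the 64-bit
                           load), rd + 1 gets the word at address + 4 */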
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    gen_update_fprs_dirty(rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    gen_update_fprs_dirty(rd);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_env, cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                        gen_update_fprs_dirty(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                        gen_update_fprs_dirty(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        if (gen_trap_ifnofpu(dc, cpu_cond)) {
                            goto jmp_insn;
                        }
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
                        goto jmp_insn;
                    }
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}

static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

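    /* cpu_dst, cpu_val and cpu_addr are local temps so their values are
       preserved across the brcond/label sequences emitted for a single
       instruction */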
    cpu_dst = tcg_temp_local_new();
5031

    
5032
    // loads and stores
5033
    cpu_val = tcg_temp_local_new();
5034
    cpu_addr = tcg_temp_local_new();
5035

    
5036
    num_insns = 0;
5037
    max_insns = tb->cflags & CF_COUNT_MASK;
5038
    if (max_insns == 0)
5039
        max_insns = CF_COUNT_MASK;
5040
    gen_icount_start();
5041
    do {
5042
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5043
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5044
                if (bp->pc == dc->pc) {
5045
                    if (dc->pc != pc_start)
5046
                        save_state(dc, cpu_cond);
5047
                    gen_helper_debug(cpu_env);
5048
                    tcg_gen_exit_tb(0);
5049
                    dc->is_br = 1;
5050
                    goto exit_gen_loop;
5051
                }
5052
            }
5053
        }
5054
        if (spc) {
5055
            qemu_log("Search PC...\n");
5056
            j = gen_opc_ptr - gen_opc_buf;
5057
            if (lj < j) {
5058
                lj++;
5059
                while (lj < j)
5060
                    gen_opc_instr_start[lj++] = 0;
5061
                gen_opc_pc[lj] = dc->pc;
5062
                gen_opc_npc[lj] = dc->npc;
5063
                gen_opc_instr_start[lj] = 1;
5064
                gen_opc_icount[lj] = num_insns;
5065
            }
5066
        }
5067
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5068
            gen_io_start();
5069
        last_pc = dc->pc;
5070
        disas_sparc_insn(dc);
5071
        num_insns++;
5072

    
5073
        if (dc->is_br)
5074
            break;
5075
        /* if the next PC is different, we abort now */
5076
        if (dc->pc != (last_pc + 4))
5077
            break;
5078
        /* if we reach a page boundary, we stop generation so that the
5079
           PC of a TT_TFAULT exception is always in the right page */
5080
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5081
            break;
5082
        /* if single step mode, we generate only one instruction and
5083
           generate an exception */
5084
        if (dc->singlestep) {
5085
            break;
5086
        }
5087
    } while ((gen_opc_ptr < gen_opc_end) &&
5088
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5089
             num_insns < max_insns);
5090

    
5091
 exit_gen_loop:
5092
    tcg_temp_free(cpu_addr);
5093
    tcg_temp_free(cpu_val);
5094
    tcg_temp_free(cpu_dst);
5095
    tcg_temp_free_i64(cpu_tmp64);
5096
    tcg_temp_free_i32(cpu_tmp32);
5097
    tcg_temp_free(cpu_tmp0);
5098
    if (tb->cflags & CF_LAST_IO)
5099
        gen_io_end();
5100
    if (!dc->is_br) {
5101
        if (dc->pc != DYNAMIC_PC &&
5102
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5103
            /* static PC and NPC: we can use direct chaining */
5104
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
5105
        } else {
5106
            if (dc->pc != DYNAMIC_PC)
5107
                tcg_gen_movi_tl(cpu_pc, dc->pc);
5108
            save_npc(dc, cpu_cond);
5109
            tcg_gen_exit_tb(0);
5110
        }
5111
    }
5112
    gen_icount_end(tb, num_insns);
5113
    *gen_opc_ptr = INDEX_op_end;
5114
    if (spc) {
5115
        j = gen_opc_ptr - gen_opc_buf;
5116
        lj++;
5117
        while (lj <= j)
5118
            gen_opc_instr_start[lj++] = 0;
5119
#if 0
5120
        log_page_dump();
5121
#endif
5122
        gen_opc_jump_pc[0] = dc->jump_pc[0];
5123
        gen_opc_jump_pc[1] = dc->jump_pc[1];
5124
    } else {
5125
        tb->size = last_pc + 4 - pc_start;
5126
        tb->icount = num_insns;
5127
    }
5128
#ifdef DEBUG_DISAS
5129
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5130
        qemu_log("--------------\n");
5131
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
5132
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5133
        qemu_log("\n");
5134
    }
5135
#endif
5136
}
5137

    
5138
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5139
{
5140
    gen_intermediate_code_internal(tb, 0, env);
5141
}
5142

    
5143
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5144
{
5145
    gen_intermediate_code_internal(tb, 1, env);
5146
}
5147

    
5148
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

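/* Note: with GEN_HELPER redefined to 2, re-including helper.h makes the
   DEF_HELPER_* macros expand to helper-registration statements instead of
   prototypes, which is what registers every helper with TCG here. */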
#define GEN_HELPER 2
#include "helper.h"
    }
}

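/* Called when CPU state must be restored after a fault in generated code:
   the TB has been re-translated in PC-search mode and pc_pos indexes the
   recorded per-insn pc/npc values. */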
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
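    /* npc uses the same sentinels as the translator: 1 == DYNAMIC_PC
       (npc already live in the CPU state), 2 == JUMP_PC (resolve via the
       recorded jump targets and the 'cond' flag). */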
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr(env);
    }
}