root / target-sparc / translate.c @ b7d69dc2

1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "disas.h"
29
#include "helper.h"
30
#include "tcg-op.h"
31

    
32
#define GEN_HELPER 1
33
#include "helper.h"
34

    
35
#define DEBUG_DISAS
36

    
37
#define DYNAMIC_PC  1 /* dynamic pc value */
38
#define JUMP_PC     2 /* dynamic pc value which takes only two values
39
                         according to jump_pc[0] or jump_pc[1] */
40

    
41
/* global register indexes */
42
static TCGv_ptr cpu_env, cpu_regwptr;
43
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44
static TCGv_i32 cpu_cc_op;
45
static TCGv_i32 cpu_psr;
46
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47
static TCGv cpu_y;
48
#ifndef CONFIG_USER_ONLY
49
static TCGv cpu_tbr;
50
#endif
51
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52
#ifdef TARGET_SPARC64
53
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54
static TCGv cpu_gsr;
55
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57
static TCGv_i32 cpu_softint;
58
#else
59
static TCGv cpu_wim;
60
#endif
61
/* local register indexes (only used inside old micro ops) */
62
static TCGv cpu_tmp0;
63
static TCGv_i32 cpu_tmp32;
64
static TCGv_i64 cpu_tmp64;
65
/* Floating point registers */
66
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
67

    
68
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69
static target_ulong gen_opc_jump_pc[2];
70

    
71
#include "gen-icount.h"
72

    
73
typedef struct DisasContext {
74
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
75
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77
    int is_br;
78
    int mem_idx;
79
    int fpu_enabled;
80
    int address_mask_32bit;
81
    int singlestep;
82
    uint32_t cc_op;  /* current CC operation */
83
    struct TranslationBlock *tb;
84
    sparc_def_t *def;
85
} DisasContext;
86

    
87
// This macro uses non-native bit order (bit 0 is the most significant bit)
88
#define GET_FIELD(X, FROM, TO)                                  \
89
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90

    
91
// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0
92
#define GET_FIELD_SP(X, FROM, TO)               \
93
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
94

    
95
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
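/* Example: GET_FIELD counts bit 0 as the most significant bit, so
   GET_FIELD(insn, 0, 1) extracts the "op" field in bits 31:30, while
   GET_FIELD_SP follows the manuals (bit 0 is 2^0), so
   GET_FIELD_SP(insn, 20, 21) extracts bits 21:20 (e.g. the cc field
   of a V9 BPcc). */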
97

    
98
#ifdef TARGET_SPARC64
99
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101
#else
102
#define DFPREG(r) (r & 0x1e)
103
#define QFPREG(r) (r & 0x1c)
104
#endif
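/* Example: with TARGET_SPARC64 a 5-bit register field of 3 names %d34
   (DFPREG(3) == 34); per SPARC V9 the low bit of the field supplies
   bit 5 of the double/quad register number.  Without TARGET_SPARC64
   that bit is simply masked off. */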
105

    
106
#define UA2005_HTRAP_MASK 0xff
107
#define V8_TRAP_MASK 0x7f
108

    
109
static int sign_extend(int x, int len)
110
{
111
    len = 32 - len;
112
    return (x << len) >> len;
113
}
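/* Example: sign_extend(0x1000, 13) == -4096; bit 12 is the sign bit
   of a 13-bit field such as simm13. */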
114

    
115
#define IS_IMM (insn & (1<<13))
116

    
117
/* floating-point register moves */
118
static void gen_op_load_fpr_DT0(unsigned int src)
119
{
120
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
121
                   offsetof(CPU_DoubleU, l.upper));
122
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
123
                   offsetof(CPU_DoubleU, l.lower));
124
}
125

    
126
static void gen_op_load_fpr_DT1(unsigned int src)
127
{
128
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
129
                   offsetof(CPU_DoubleU, l.upper));
130
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
131
                   offsetof(CPU_DoubleU, l.lower));
132
}
133

    
134
static void gen_op_store_DT0_fpr(unsigned int dst)
135
{
136
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
137
                   offsetof(CPU_DoubleU, l.upper));
138
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
139
                   offsetof(CPU_DoubleU, l.lower));
140
}
141

    
142
static void gen_op_load_fpr_QT0(unsigned int src)
143
{
144
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
145
                   offsetof(CPU_QuadU, l.upmost));
146
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
147
                   offsetof(CPU_QuadU, l.upper));
148
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
149
                   offsetof(CPU_QuadU, l.lower));
150
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
151
                   offsetof(CPU_QuadU, l.lowest));
152
}
153

    
154
static void gen_op_load_fpr_QT1(unsigned int src)
155
{
156
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
157
                   offsetof(CPU_QuadU, l.upmost));
158
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
159
                   offsetof(CPU_QuadU, l.upper));
160
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
161
                   offsetof(CPU_QuadU, l.lower));
162
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
163
                   offsetof(CPU_QuadU, l.lowest));
164
}
165

    
166
static void gen_op_store_QT0_fpr(unsigned int dst)
167
{
168
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
169
                   offsetof(CPU_QuadU, l.upmost));
170
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
171
                   offsetof(CPU_QuadU, l.upper));
172
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
173
                   offsetof(CPU_QuadU, l.lower));
174
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
175
                   offsetof(CPU_QuadU, l.lowest));
176
}
177

    
178
/* moves */
179
#ifdef CONFIG_USER_ONLY
180
#define supervisor(dc) 0
181
#ifdef TARGET_SPARC64
182
#define hypervisor(dc) 0
183
#endif
184
#else
185
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
186
#ifdef TARGET_SPARC64
187
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
188
#else
189
#endif
190
#endif
191

    
192
#ifdef TARGET_SPARC64
193
#ifndef TARGET_ABI32
194
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
195
#else
196
#define AM_CHECK(dc) (1)
197
#endif
198
#endif
199

    
200
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
201
{
202
#ifdef TARGET_SPARC64
203
    if (AM_CHECK(dc))
204
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205
#endif
206
}
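/* When PSTATE.AM is in effect (address_mask_32bit), V9 requires the
   upper 32 address bits to be masked off before the access; on 32-bit
   targets this is a no-op. */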
207

    
208
static inline void gen_movl_reg_TN(int reg, TCGv tn)
209
{
210
    if (reg == 0)
211
        tcg_gen_movi_tl(tn, 0);
212
    else if (reg < 8)
213
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
214
    else {
215
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216
    }
217
}
218

    
219
static inline void gen_movl_TN_reg(int reg, TCGv tn)
220
{
221
    if (reg == 0)
222
        return;
223
    else if (reg < 8)
224
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
225
    else {
226
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227
    }
228
}
229

    
230
static inline void gen_goto_tb(DisasContext *s, int tb_num,
231
                               target_ulong pc, target_ulong npc)
232
{
233
    TranslationBlock *tb;
234

    
235
    tb = s->tb;
236
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
237
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
238
        !s->singlestep)  {
239
        /* jump to same page: we can use a direct jump */
240
        tcg_gen_goto_tb(tb_num);
241
        tcg_gen_movi_tl(cpu_pc, pc);
242
        tcg_gen_movi_tl(cpu_npc, npc);
243
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
244
    } else {
245
        /* jump to another page: currently not optimized */
246
        tcg_gen_movi_tl(cpu_pc, pc);
247
        tcg_gen_movi_tl(cpu_npc, npc);
248
        tcg_gen_exit_tb(0);
249
    }
250
}
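/* Note: the TB can only be chained directly (tcg_gen_goto_tb +
   tcg_gen_exit_tb with the TB pointer and slot number) when both pc
   and npc stay on the same guest page as the current TB and we are
   not single-stepping; otherwise exit_tb(0) forces a full TB lookup. */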
251

    
252
// XXX suboptimal
253
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
254
{
255
    tcg_gen_extu_i32_tl(reg, src);
256
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
257
    tcg_gen_andi_tl(reg, reg, 0x1);
258
}
259

    
260
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
261
{
262
    tcg_gen_extu_i32_tl(reg, src);
263
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
264
    tcg_gen_andi_tl(reg, reg, 0x1);
265
}
266

    
267
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
268
{
269
    tcg_gen_extu_i32_tl(reg, src);
270
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
271
    tcg_gen_andi_tl(reg, reg, 0x1);
272
}
273

    
274
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
275
{
276
    tcg_gen_extu_i32_tl(reg, src);
277
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
278
    tcg_gen_andi_tl(reg, reg, 0x1);
279
}
280

    
281
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
282
{
283
    TCGv r_temp;
284
    TCGv_i32 r_const;
285
    int l1;
286

    
287
    l1 = gen_new_label();
288

    
289
    r_temp = tcg_temp_new();
290
    tcg_gen_xor_tl(r_temp, src1, src2);
291
    tcg_gen_not_tl(r_temp, r_temp);
292
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
293
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
294
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
295
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
296
    r_const = tcg_const_i32(TT_TOVF);
297
    gen_helper_raise_exception(r_const);
298
    tcg_temp_free_i32(r_const);
299
    gen_set_label(l1);
300
    tcg_temp_free(r_temp);
301
}
302

    
303
static inline void gen_tag_tv(TCGv src1, TCGv src2)
304
{
305
    int l1;
306
    TCGv_i32 r_const;
307

    
308
    l1 = gen_new_label();
309
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
310
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
311
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
312
    r_const = tcg_const_i32(TT_TOVF);
313
    gen_helper_raise_exception(r_const);
314
    tcg_temp_free_i32(r_const);
315
    gen_set_label(l1);
316
}
317

    
318
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
319
{
320
    tcg_gen_mov_tl(cpu_cc_src, src1);
321
    tcg_gen_movi_tl(cpu_cc_src2, src2);
322
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
323
    tcg_gen_mov_tl(dst, cpu_cc_dst);
324
}
325

    
326
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
327
{
328
    tcg_gen_mov_tl(cpu_cc_src, src1);
329
    tcg_gen_mov_tl(cpu_cc_src2, src2);
330
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
331
    tcg_gen_mov_tl(dst, cpu_cc_dst);
332
}
333

    
334
static TCGv_i32 gen_add32_carry32(void)
335
{
336
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
337

    
338
    /* Carry is computed from a previous add: (dst < src)  */
339
#if TARGET_LONG_BITS == 64
340
    cc_src1_32 = tcg_temp_new_i32();
341
    cc_src2_32 = tcg_temp_new_i32();
342
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
343
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
344
#else
345
    cc_src1_32 = cpu_cc_dst;
346
    cc_src2_32 = cpu_cc_src;
347
#endif
348

    
349
    carry_32 = tcg_temp_new_i32();
350
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
351

    
352
#if TARGET_LONG_BITS == 64
353
    tcg_temp_free_i32(cc_src1_32);
354
    tcg_temp_free_i32(cc_src2_32);
355
#endif
356

    
357
    return carry_32;
358
}
359

    
360
static TCGv_i32 gen_sub32_carry32(void)
361
{
362
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
363

    
364
    /* Carry is computed from a previous borrow: (src1 < src2)  */
365
#if TARGET_LONG_BITS == 64
366
    cc_src1_32 = tcg_temp_new_i32();
367
    cc_src2_32 = tcg_temp_new_i32();
368
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
369
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
370
#else
371
    cc_src1_32 = cpu_cc_src;
372
    cc_src2_32 = cpu_cc_src2;
373
#endif
374

    
375
    carry_32 = tcg_temp_new_i32();
376
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
377

    
378
#if TARGET_LONG_BITS == 64
379
    tcg_temp_free_i32(cc_src1_32);
380
    tcg_temp_free_i32(cc_src2_32);
381
#endif
382

    
383
    return carry_32;
384
}
385

    
386
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
387
                            TCGv src2, int update_cc)
388
{
389
    TCGv_i32 carry_32;
390
    TCGv carry;
391

    
392
    switch (dc->cc_op) {
393
    case CC_OP_DIV:
394
    case CC_OP_LOGIC:
395
        /* Carry is known to be zero.  Fall back to plain ADD.  */
396
        if (update_cc) {
397
            gen_op_add_cc(dst, src1, src2);
398
        } else {
399
            tcg_gen_add_tl(dst, src1, src2);
400
        }
401
        return;
402

    
403
    case CC_OP_ADD:
404
    case CC_OP_TADD:
405
    case CC_OP_TADDTV:
406
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
407
        {
408
            /* For 32-bit hosts, we can re-use the host's hardware carry
409
               generation by using an ADD2 opcode.  We discard the low
410
               part of the output.  Ideally we'd combine this operation
411
               with the add that generated the carry in the first place.  */
412
            TCGv dst_low = tcg_temp_new();
413
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
414
                            cpu_cc_src, src1, cpu_cc_src2, src2);
415
            tcg_temp_free(dst_low);
416
            goto add_done;
417
        }
418
#endif
419
        carry_32 = gen_add32_carry32();
420
        break;
421

    
422
    case CC_OP_SUB:
423
    case CC_OP_TSUB:
424
    case CC_OP_TSUBTV:
425
        carry_32 = gen_sub32_carry32();
426
        break;
427

    
428
    default:
429
        /* We need external help to produce the carry.  */
430
        carry_32 = tcg_temp_new_i32();
431
        gen_helper_compute_C_icc(carry_32);
432
        break;
433
    }
434

    
435
#if TARGET_LONG_BITS == 64
436
    carry = tcg_temp_new();
437
    tcg_gen_extu_i32_i64(carry, carry_32);
438
#else
439
    carry = carry_32;
440
#endif
441

    
442
    tcg_gen_add_tl(dst, src1, src2);
443
    tcg_gen_add_tl(dst, dst, carry);
444

    
445
    tcg_temp_free_i32(carry_32);
446
#if TARGET_LONG_BITS == 64
447
    tcg_temp_free(carry);
448
#endif
449

    
450
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
451
 add_done:
452
#endif
453
    if (update_cc) {
454
        tcg_gen_mov_tl(cpu_cc_src, src1);
455
        tcg_gen_mov_tl(cpu_cc_src2, src2);
456
        tcg_gen_mov_tl(cpu_cc_dst, dst);
457
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
458
        dc->cc_op = CC_OP_ADDX;
459
    }
460
}
461

    
462
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
463
{
464
    tcg_gen_mov_tl(cpu_cc_src, src1);
465
    tcg_gen_mov_tl(cpu_cc_src2, src2);
466
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
467
    tcg_gen_mov_tl(dst, cpu_cc_dst);
468
}
469

    
470
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
471
{
472
    tcg_gen_mov_tl(cpu_cc_src, src1);
473
    tcg_gen_mov_tl(cpu_cc_src2, src2);
474
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
475
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
476
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477
    tcg_gen_mov_tl(dst, cpu_cc_dst);
478
}
479

    
480
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
481
{
482
    TCGv r_temp;
483
    TCGv_i32 r_const;
484
    int l1;
485

    
486
    l1 = gen_new_label();
487

    
488
    r_temp = tcg_temp_new();
489
    tcg_gen_xor_tl(r_temp, src1, src2);
490
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
491
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
492
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
493
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
494
    r_const = tcg_const_i32(TT_TOVF);
495
    gen_helper_raise_exception(r_const);
496
    tcg_temp_free_i32(r_const);
497
    gen_set_label(l1);
498
    tcg_temp_free(r_temp);
499
}
500

    
501
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
502
{
503
    tcg_gen_mov_tl(cpu_cc_src, src1);
504
    tcg_gen_movi_tl(cpu_cc_src2, src2);
505
    if (src2 == 0) {
506
        tcg_gen_mov_tl(cpu_cc_dst, src1);
507
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
508
        dc->cc_op = CC_OP_LOGIC;
509
    } else {
510
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
511
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
512
        dc->cc_op = CC_OP_SUB;
513
    }
514
    tcg_gen_mov_tl(dst, cpu_cc_dst);
515
}
516

    
517
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
518
{
519
    tcg_gen_mov_tl(cpu_cc_src, src1);
520
    tcg_gen_mov_tl(cpu_cc_src2, src2);
521
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
522
    tcg_gen_mov_tl(dst, cpu_cc_dst);
523
}
524

    
525
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
526
                            TCGv src2, int update_cc)
527
{
528
    TCGv_i32 carry_32;
529
    TCGv carry;
530

    
531
    switch (dc->cc_op) {
532
    case CC_OP_DIV:
533
    case CC_OP_LOGIC:
534
        /* Carry is known to be zero.  Fall back to plain SUB.  */
535
        if (update_cc) {
536
            gen_op_sub_cc(dst, src1, src2);
537
        } else {
538
            tcg_gen_sub_tl(dst, src1, src2);
539
        }
540
        return;
541

    
542
    case CC_OP_ADD:
543
    case CC_OP_TADD:
544
    case CC_OP_TADDTV:
545
        carry_32 = gen_add32_carry32();
546
        break;
547

    
548
    case CC_OP_SUB:
549
    case CC_OP_TSUB:
550
    case CC_OP_TSUBTV:
551
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
552
        {
553
            /* For 32-bit hosts, we can re-use the host's hardware carry
554
               generation by using a SUB2 opcode.  We discard the low
555
               part of the output.  Ideally we'd combine this operation
556
               with the subtraction that generated the carry in the first place.  */
557
            TCGv dst_low = tcg_temp_new();
558
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
559
                            cpu_cc_src, src1, cpu_cc_src2, src2);
560
            tcg_temp_free(dst_low);
561
            goto sub_done;
562
        }
563
#endif
564
        carry_32 = gen_sub32_carry32();
565
        break;
566

    
567
    default:
568
        /* We need external help to produce the carry.  */
569
        carry_32 = tcg_temp_new_i32();
570
        gen_helper_compute_C_icc(carry_32);
571
        break;
572
    }
573

    
574
#if TARGET_LONG_BITS == 64
575
    carry = tcg_temp_new();
576
    tcg_gen_extu_i32_i64(carry, carry_32);
577
#else
578
    carry = carry_32;
579
#endif
580

    
581
    tcg_gen_sub_tl(dst, src1, src2);
582
    tcg_gen_sub_tl(dst, dst, carry);
583

    
584
    tcg_temp_free_i32(carry_32);
585
#if TARGET_LONG_BITS == 64
586
    tcg_temp_free(carry);
587
#endif
588

    
589
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
590
 sub_done:
591
#endif
592
    if (update_cc) {
593
        tcg_gen_mov_tl(cpu_cc_src, src1);
594
        tcg_gen_mov_tl(cpu_cc_src2, src2);
595
        tcg_gen_mov_tl(cpu_cc_dst, dst);
596
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
597
        dc->cc_op = CC_OP_SUBX;
598
    }
599
}
600

    
601
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
602
{
603
    tcg_gen_mov_tl(cpu_cc_src, src1);
604
    tcg_gen_mov_tl(cpu_cc_src2, src2);
605
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
606
    tcg_gen_mov_tl(dst, cpu_cc_dst);
607
}
608

    
609
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
610
{
611
    tcg_gen_mov_tl(cpu_cc_src, src1);
612
    tcg_gen_mov_tl(cpu_cc_src2, src2);
613
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
614
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
615
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
616
    tcg_gen_mov_tl(dst, cpu_cc_dst);
617
}
618

    
619
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
620
{
621
    TCGv r_temp;
622
    int l1;
623

    
624
    l1 = gen_new_label();
625
    r_temp = tcg_temp_new();
626

    
627
    /* old op:
628
    if (!(env->y & 1))
629
        T1 = 0;
630
    */
631
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
632
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
633
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
634
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
635
    tcg_gen_movi_tl(cpu_cc_src2, 0);
636
    gen_set_label(l1);
637

    
638
    // b2 = T0 & 1;
639
    // env->y = (b2 << 31) | (env->y >> 1);
640
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
641
    tcg_gen_shli_tl(r_temp, r_temp, 31);
642
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
643
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
644
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
645
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
646

    
647
    // b1 = N ^ V;
648
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
649
    gen_mov_reg_V(r_temp, cpu_psr);
650
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
651
    tcg_temp_free(r_temp);
652

    
653
    // T0 = (b1 << 31) | (T0 >> 1);
654
    // src1 = T0;
655
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
656
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
657
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
658

    
659
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
660

    
661
    tcg_gen_mov_tl(dst, cpu_cc_dst);
662
}
663

    
664
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
665
{
666
    TCGv_i32 r_src1, r_src2;
667
    TCGv_i64 r_temp, r_temp2;
668

    
669
    r_src1 = tcg_temp_new_i32();
670
    r_src2 = tcg_temp_new_i32();
671

    
672
    tcg_gen_trunc_tl_i32(r_src1, src1);
673
    tcg_gen_trunc_tl_i32(r_src2, src2);
674

    
675
    r_temp = tcg_temp_new_i64();
676
    r_temp2 = tcg_temp_new_i64();
677

    
678
    if (sign_ext) {
679
        tcg_gen_ext_i32_i64(r_temp, r_src2);
680
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
681
    } else {
682
        tcg_gen_extu_i32_i64(r_temp, r_src2);
683
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
684
    }
685

    
686
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
687

    
688
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
689
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
690
    tcg_temp_free_i64(r_temp);
691
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
692

    
693
    tcg_gen_trunc_i64_tl(dst, r_temp2);
694

    
695
    tcg_temp_free_i64(r_temp2);
696

    
697
    tcg_temp_free_i32(r_src1);
698
    tcg_temp_free_i32(r_src2);
699
}
700

    
701
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
702
{
703
    /* zero-extend truncated operands before multiplication */
704
    gen_op_multiply(dst, src1, src2, 0);
705
}
706

    
707
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
708
{
709
    /* sign-extend truncated operands before multiplication */
710
    gen_op_multiply(dst, src1, src2, 1);
711
}
712

    
713
#ifdef TARGET_SPARC64
714
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
715
{
716
    TCGv_i32 r_const;
717
    int l1;
718

    
719
    l1 = gen_new_label();
720
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
721
    r_const = tcg_const_i32(TT_DIV_ZERO);
722
    gen_helper_raise_exception(r_const);
723
    tcg_temp_free_i32(r_const);
724
    gen_set_label(l1);
725
}
726

    
727
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
728
{
729
    int l1, l2;
730
    TCGv r_temp1, r_temp2;
731

    
732
    l1 = gen_new_label();
733
    l2 = gen_new_label();
734
    r_temp1 = tcg_temp_local_new();
735
    r_temp2 = tcg_temp_local_new();
736
    tcg_gen_mov_tl(r_temp1, src1);
737
    tcg_gen_mov_tl(r_temp2, src2);
738
    gen_trap_ifdivzero_tl(r_temp2);
739
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
740
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
741
    tcg_gen_movi_i64(dst, INT64_MIN);
742
    tcg_gen_br(l2);
743
    gen_set_label(l1);
744
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
745
    gen_set_label(l2);
746
    tcg_temp_free(r_temp1);
747
    tcg_temp_free(r_temp2);
748
}
749
#endif
750

    
751
// 1
752
static inline void gen_op_eval_ba(TCGv dst)
753
{
754
    tcg_gen_movi_tl(dst, 1);
755
}
756

    
757
// Z
758
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
759
{
760
    gen_mov_reg_Z(dst, src);
761
}
762

    
763
// Z | (N ^ V)
764
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
765
{
766
    gen_mov_reg_N(cpu_tmp0, src);
767
    gen_mov_reg_V(dst, src);
768
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
769
    gen_mov_reg_Z(cpu_tmp0, src);
770
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
771
}
772

    
773
// N ^ V
774
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
775
{
776
    gen_mov_reg_V(cpu_tmp0, src);
777
    gen_mov_reg_N(dst, src);
778
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
779
}
780

    
781
// C | Z
782
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
783
{
784
    gen_mov_reg_Z(cpu_tmp0, src);
785
    gen_mov_reg_C(dst, src);
786
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
787
}
788

    
789
// C
790
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
791
{
792
    gen_mov_reg_C(dst, src);
793
}
794

    
795
// V
796
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
797
{
798
    gen_mov_reg_V(dst, src);
799
}
800

    
801
// 0
802
static inline void gen_op_eval_bn(TCGv dst)
803
{
804
    tcg_gen_movi_tl(dst, 0);
805
}
806

    
807
// N
808
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
809
{
810
    gen_mov_reg_N(dst, src);
811
}
812

    
813
// !Z
814
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
815
{
816
    gen_mov_reg_Z(dst, src);
817
    tcg_gen_xori_tl(dst, dst, 0x1);
818
}
819

    
820
// !(Z | (N ^ V))
821
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
822
{
823
    gen_mov_reg_N(cpu_tmp0, src);
824
    gen_mov_reg_V(dst, src);
825
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
826
    gen_mov_reg_Z(cpu_tmp0, src);
827
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
828
    tcg_gen_xori_tl(dst, dst, 0x1);
829
}
830

    
831
// !(N ^ V)
832
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
833
{
834
    gen_mov_reg_V(cpu_tmp0, src);
835
    gen_mov_reg_N(dst, src);
836
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
837
    tcg_gen_xori_tl(dst, dst, 0x1);
838
}
839

    
840
// !(C | Z)
841
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
842
{
843
    gen_mov_reg_Z(cpu_tmp0, src);
844
    gen_mov_reg_C(dst, src);
845
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
846
    tcg_gen_xori_tl(dst, dst, 0x1);
847
}
848

    
849
// !C
850
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
851
{
852
    gen_mov_reg_C(dst, src);
853
    tcg_gen_xori_tl(dst, dst, 0x1);
854
}
855

    
856
// !N
857
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
858
{
859
    gen_mov_reg_N(dst, src);
860
    tcg_gen_xori_tl(dst, dst, 0x1);
861
}
862

    
863
// !V
864
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
865
{
866
    gen_mov_reg_V(dst, src);
867
    tcg_gen_xori_tl(dst, dst, 0x1);
868
}
869

    
870
/*
871
  FPSR bit field FCC1 | FCC0:
872
   0 =
873
   1 <
874
   2 >
875
   3 unordered
876
*/
877
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
878
                                    unsigned int fcc_offset)
879
{
880
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
881
    tcg_gen_andi_tl(reg, reg, 0x1);
882
}
883

    
884
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
885
                                    unsigned int fcc_offset)
886
{
887
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
888
    tcg_gen_andi_tl(reg, reg, 0x1);
889
}
890

    
891
// !0: FCC0 | FCC1
892
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
893
                                    unsigned int fcc_offset)
894
{
895
    gen_mov_reg_FCC0(dst, src, fcc_offset);
896
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
897
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
898
}
899

    
900
// 1 or 2: FCC0 ^ FCC1
901
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
902
                                    unsigned int fcc_offset)
903
{
904
    gen_mov_reg_FCC0(dst, src, fcc_offset);
905
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
906
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
907
}
908

    
909
// 1 or 3: FCC0
910
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
911
                                    unsigned int fcc_offset)
912
{
913
    gen_mov_reg_FCC0(dst, src, fcc_offset);
914
}
915

    
916
// 1: FCC0 & !FCC1
917
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
918
                                    unsigned int fcc_offset)
919
{
920
    gen_mov_reg_FCC0(dst, src, fcc_offset);
921
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
922
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
923
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
924
}
925

    
926
// 2 or 3: FCC1
927
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
928
                                    unsigned int fcc_offset)
929
{
930
    gen_mov_reg_FCC1(dst, src, fcc_offset);
931
}
932

    
933
// 2: !FCC0 & FCC1
934
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
935
                                    unsigned int fcc_offset)
936
{
937
    gen_mov_reg_FCC0(dst, src, fcc_offset);
938
    tcg_gen_xori_tl(dst, dst, 0x1);
939
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
940
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
941
}
942

    
943
// 3: FCC0 & FCC1
944
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
945
                                    unsigned int fcc_offset)
946
{
947
    gen_mov_reg_FCC0(dst, src, fcc_offset);
948
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
949
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
950
}
951

    
952
// 0: !(FCC0 | FCC1)
953
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
954
                                    unsigned int fcc_offset)
955
{
956
    gen_mov_reg_FCC0(dst, src, fcc_offset);
957
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
958
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
959
    tcg_gen_xori_tl(dst, dst, 0x1);
960
}
961

    
962
// 0 or 3: !(FCC0 ^ FCC1)
963
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
964
                                    unsigned int fcc_offset)
965
{
966
    gen_mov_reg_FCC0(dst, src, fcc_offset);
967
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
968
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
969
    tcg_gen_xori_tl(dst, dst, 0x1);
970
}
971

    
972
// 0 or 2: !FCC0
973
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
974
                                    unsigned int fcc_offset)
975
{
976
    gen_mov_reg_FCC0(dst, src, fcc_offset);
977
    tcg_gen_xori_tl(dst, dst, 0x1);
978
}
979

    
980
// !1: !(FCC0 & !FCC1)
981
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
982
                                    unsigned int fcc_offset)
983
{
984
    gen_mov_reg_FCC0(dst, src, fcc_offset);
985
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
986
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
987
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
988
    tcg_gen_xori_tl(dst, dst, 0x1);
989
}
990

    
991
// 0 or 1: !FCC1
992
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
993
                                    unsigned int fcc_offset)
994
{
995
    gen_mov_reg_FCC1(dst, src, fcc_offset);
996
    tcg_gen_xori_tl(dst, dst, 0x1);
997
}
998

    
999
// !2: !(!FCC0 & FCC1)
1000
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1001
                                    unsigned int fcc_offset)
1002
{
1003
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1004
    tcg_gen_xori_tl(dst, dst, 0x1);
1005
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1006
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1007
    tcg_gen_xori_tl(dst, dst, 0x1);
1008
}
1009

    
1010
// !3: !(FCC0 & FCC1)
1011
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1012
                                    unsigned int fcc_offset)
1013
{
1014
    gen_mov_reg_FCC0(dst, src, fcc_offset);
1015
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1016
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017
    tcg_gen_xori_tl(dst, dst, 0x1);
1018
}
1019

    
1020
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1021
                               target_ulong pc2, TCGv r_cond)
1022
{
1023
    int l1;
1024

    
1025
    l1 = gen_new_label();
1026

    
1027
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1028

    
1029
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
1030

    
1031
    gen_set_label(l1);
1032
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
1033
}
1034

    
1035
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1036
                                target_ulong pc2, TCGv r_cond)
1037
{
1038
    int l1;
1039

    
1040
    l1 = gen_new_label();
1041

    
1042
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1043

    
1044
    gen_goto_tb(dc, 0, pc2, pc1);
1045

    
1046
    gen_set_label(l1);
1047
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1048
}
1049

    
1050
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1051
                                      TCGv r_cond)
1052
{
1053
    int l1, l2;
1054

    
1055
    l1 = gen_new_label();
1056
    l2 = gen_new_label();
1057

    
1058
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1059

    
1060
    tcg_gen_movi_tl(cpu_npc, npc1);
1061
    tcg_gen_br(l2);
1062

    
1063
    gen_set_label(l1);
1064
    tcg_gen_movi_tl(cpu_npc, npc2);
1065
    gen_set_label(l2);
1066
}
1067

    
1068
/* call this function before using the condition register as it may
1069
   have been set for a jump */
1070
static inline void flush_cond(DisasContext *dc, TCGv cond)
1071
{
1072
    if (dc->npc == JUMP_PC) {
1073
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1074
        dc->npc = DYNAMIC_PC;
1075
    }
1076
}
1077

    
1078
static inline void save_npc(DisasContext *dc, TCGv cond)
1079
{
1080
    if (dc->npc == JUMP_PC) {
1081
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1082
        dc->npc = DYNAMIC_PC;
1083
    } else if (dc->npc != DYNAMIC_PC) {
1084
        tcg_gen_movi_tl(cpu_npc, dc->npc);
1085
    }
1086
}
1087

    
1088
static inline void save_state(DisasContext *dc, TCGv cond)
1089
{
1090
    tcg_gen_movi_tl(cpu_pc, dc->pc);
1091
    /* flush pending conditional evaluations before exposing cpu state */
1092
    if (dc->cc_op != CC_OP_FLAGS) {
1093
        dc->cc_op = CC_OP_FLAGS;
1094
        gen_helper_compute_psr();
1095
    }
1096
    save_npc(dc, cond);
1097
}
1098

    
1099
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1100
{
1101
    if (dc->npc == JUMP_PC) {
1102
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1103
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1104
        dc->pc = DYNAMIC_PC;
1105
    } else if (dc->npc == DYNAMIC_PC) {
1106
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1107
        dc->pc = DYNAMIC_PC;
1108
    } else {
1109
        dc->pc = dc->npc;
1110
    }
1111
}
1112

    
1113
static inline void gen_op_next_insn(void)
1114
{
1115
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
1116
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1117
}
1118

    
1119
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1120
                            DisasContext *dc)
1121
{
1122
    TCGv_i32 r_src;
1123

    
1124
#ifdef TARGET_SPARC64
1125
    if (cc)
1126
        r_src = cpu_xcc;
1127
    else
1128
        r_src = cpu_psr;
1129
#else
1130
    r_src = cpu_psr;
1131
#endif
1132
    switch (dc->cc_op) {
1133
    case CC_OP_FLAGS:
1134
        break;
1135
    default:
1136
        gen_helper_compute_psr();
1137
        dc->cc_op = CC_OP_FLAGS;
1138
        break;
1139
    }
1140
    switch (cond) {
1141
    case 0x0:
1142
        gen_op_eval_bn(r_dst);
1143
        break;
1144
    case 0x1:
1145
        gen_op_eval_be(r_dst, r_src);
1146
        break;
1147
    case 0x2:
1148
        gen_op_eval_ble(r_dst, r_src);
1149
        break;
1150
    case 0x3:
1151
        gen_op_eval_bl(r_dst, r_src);
1152
        break;
1153
    case 0x4:
1154
        gen_op_eval_bleu(r_dst, r_src);
1155
        break;
1156
    case 0x5:
1157
        gen_op_eval_bcs(r_dst, r_src);
1158
        break;
1159
    case 0x6:
1160
        gen_op_eval_bneg(r_dst, r_src);
1161
        break;
1162
    case 0x7:
1163
        gen_op_eval_bvs(r_dst, r_src);
1164
        break;
1165
    case 0x8:
1166
        gen_op_eval_ba(r_dst);
1167
        break;
1168
    case 0x9:
1169
        gen_op_eval_bne(r_dst, r_src);
1170
        break;
1171
    case 0xa:
1172
        gen_op_eval_bg(r_dst, r_src);
1173
        break;
1174
    case 0xb:
1175
        gen_op_eval_bge(r_dst, r_src);
1176
        break;
1177
    case 0xc:
1178
        gen_op_eval_bgu(r_dst, r_src);
1179
        break;
1180
    case 0xd:
1181
        gen_op_eval_bcc(r_dst, r_src);
1182
        break;
1183
    case 0xe:
1184
        gen_op_eval_bpos(r_dst, r_src);
1185
        break;
1186
    case 0xf:
1187
        gen_op_eval_bvc(r_dst, r_src);
1188
        break;
1189
    }
1190
}
1191

    
1192
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1193
{
1194
    unsigned int offset;
1195

    
1196
    switch (cc) {
1197
    default:
1198
    case 0x0:
1199
        offset = 0;
1200
        break;
1201
    case 0x1:
1202
        offset = 32 - 10;
1203
        break;
1204
    case 0x2:
1205
        offset = 34 - 10;
1206
        break;
1207
    case 0x3:
1208
        offset = 36 - 10;
1209
        break;
1210
    }
1211

    
1212
    switch (cond) {
1213
    case 0x0:
1214
        gen_op_eval_bn(r_dst);
1215
        break;
1216
    case 0x1:
1217
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1218
        break;
1219
    case 0x2:
1220
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1221
        break;
1222
    case 0x3:
1223
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1224
        break;
1225
    case 0x4:
1226
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1227
        break;
1228
    case 0x5:
1229
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1230
        break;
1231
    case 0x6:
1232
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1233
        break;
1234
    case 0x7:
1235
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1236
        break;
1237
    case 0x8:
1238
        gen_op_eval_ba(r_dst);
1239
        break;
1240
    case 0x9:
1241
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1242
        break;
1243
    case 0xa:
1244
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1245
        break;
1246
    case 0xb:
1247
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1248
        break;
1249
    case 0xc:
1250
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1251
        break;
1252
    case 0xd:
1253
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1254
        break;
1255
    case 0xe:
1256
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1257
        break;
1258
    case 0xf:
1259
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1260
        break;
1261
    }
1262
}
1263

    
1264
#ifdef TARGET_SPARC64
1265
// Inverted logic
1266
static const int gen_tcg_cond_reg[8] = {
1267
    -1,
1268
    TCG_COND_NE,
1269
    TCG_COND_GT,
1270
    TCG_COND_GE,
1271
    -1,
1272
    TCG_COND_EQ,
1273
    TCG_COND_LE,
1274
    TCG_COND_LT,
1275
};
1276

    
1277
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1278
{
1279
    int l1;
1280

    
1281
    l1 = gen_new_label();
1282
    tcg_gen_movi_tl(r_dst, 0);
1283
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1284
    tcg_gen_movi_tl(r_dst, 1);
1285
    gen_set_label(l1);
1286
}
1287
#endif
1288

    
1289
/* XXX: potentially incorrect if dynamic npc */
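/* The annul bit (insn bit 29) controls the delay slot: a conditional
   branch with a=1 executes the delay slot only when taken (handled by
   gen_branch_a), while "ba,a" and "bn,a" skip it outright. */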
1290
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1291
                      TCGv r_cond)
1292
{
1293
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1294
    target_ulong target = dc->pc + offset;
1295

    
1296
    if (cond == 0x0) {
1297
        /* unconditional not taken */
1298
        if (a) {
1299
            dc->pc = dc->npc + 4;
1300
            dc->npc = dc->pc + 4;
1301
        } else {
1302
            dc->pc = dc->npc;
1303
            dc->npc = dc->pc + 4;
1304
        }
1305
    } else if (cond == 0x8) {
1306
        /* unconditional taken */
1307
        if (a) {
1308
            dc->pc = target;
1309
            dc->npc = dc->pc + 4;
1310
        } else {
1311
            dc->pc = dc->npc;
1312
            dc->npc = target;
1313
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1314
        }
1315
    } else {
1316
        flush_cond(dc, r_cond);
1317
        gen_cond(r_cond, cc, cond, dc);
1318
        if (a) {
1319
            gen_branch_a(dc, target, dc->npc, r_cond);
1320
            dc->is_br = 1;
1321
        } else {
1322
            dc->pc = dc->npc;
1323
            dc->jump_pc[0] = target;
1324
            dc->jump_pc[1] = dc->npc + 4;
1325
            dc->npc = JUMP_PC;
1326
        }
1327
    }
1328
}
1329

    
1330
/* XXX: potentially incorrect if dynamic npc */
1331
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1332
                      TCGv r_cond)
1333
{
1334
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1335
    target_ulong target = dc->pc + offset;
1336

    
1337
    if (cond == 0x0) {
1338
        /* unconditional not taken */
1339
        if (a) {
1340
            dc->pc = dc->npc + 4;
1341
            dc->npc = dc->pc + 4;
1342
        } else {
1343
            dc->pc = dc->npc;
1344
            dc->npc = dc->pc + 4;
1345
        }
1346
    } else if (cond == 0x8) {
1347
        /* unconditional taken */
1348
        if (a) {
1349
            dc->pc = target;
1350
            dc->npc = dc->pc + 4;
1351
        } else {
1352
            dc->pc = dc->npc;
1353
            dc->npc = target;
1354
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1355
        }
1356
    } else {
1357
        flush_cond(dc, r_cond);
1358
        gen_fcond(r_cond, cc, cond);
1359
        if (a) {
1360
            gen_branch_a(dc, target, dc->npc, r_cond);
1361
            dc->is_br = 1;
1362
        } else {
1363
            dc->pc = dc->npc;
1364
            dc->jump_pc[0] = target;
1365
            dc->jump_pc[1] = dc->npc + 4;
1366
            dc->npc = JUMP_PC;
1367
        }
1368
    }
1369
}
1370

    
1371
#ifdef TARGET_SPARC64
1372
/* XXX: potentially incorrect if dynamic npc */
1373
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1374
                          TCGv r_cond, TCGv r_reg)
1375
{
1376
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1377
    target_ulong target = dc->pc + offset;
1378

    
1379
    flush_cond(dc, r_cond);
1380
    gen_cond_reg(r_cond, cond, r_reg);
1381
    if (a) {
1382
        gen_branch_a(dc, target, dc->npc, r_cond);
1383
        dc->is_br = 1;
1384
    } else {
1385
        dc->pc = dc->npc;
1386
        dc->jump_pc[0] = target;
1387
        dc->jump_pc[1] = dc->npc + 4;
1388
        dc->npc = JUMP_PC;
1389
    }
1390
}
1391

    
1392
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1393
{
1394
    switch (fccno) {
1395
    case 0:
1396
        gen_helper_fcmps(r_rs1, r_rs2);
1397
        break;
1398
    case 1:
1399
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1400
        break;
1401
    case 2:
1402
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1403
        break;
1404
    case 3:
1405
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1406
        break;
1407
    }
1408
}
1409

    
1410
static inline void gen_op_fcmpd(int fccno)
1411
{
1412
    switch (fccno) {
1413
    case 0:
1414
        gen_helper_fcmpd();
1415
        break;
1416
    case 1:
1417
        gen_helper_fcmpd_fcc1();
1418
        break;
1419
    case 2:
1420
        gen_helper_fcmpd_fcc2();
1421
        break;
1422
    case 3:
1423
        gen_helper_fcmpd_fcc3();
1424
        break;
1425
    }
1426
}
1427

    
1428
static inline void gen_op_fcmpq(int fccno)
1429
{
1430
    switch (fccno) {
1431
    case 0:
1432
        gen_helper_fcmpq();
1433
        break;
1434
    case 1:
1435
        gen_helper_fcmpq_fcc1();
1436
        break;
1437
    case 2:
1438
        gen_helper_fcmpq_fcc2();
1439
        break;
1440
    case 3:
1441
        gen_helper_fcmpq_fcc3();
1442
        break;
1443
    }
1444
}
1445

    
1446
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1447
{
1448
    switch (fccno) {
1449
    case 0:
1450
        gen_helper_fcmpes(r_rs1, r_rs2);
1451
        break;
1452
    case 1:
1453
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1454
        break;
1455
    case 2:
1456
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1457
        break;
1458
    case 3:
1459
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1460
        break;
1461
    }
1462
}
1463

    
1464
static inline void gen_op_fcmped(int fccno)
1465
{
1466
    switch (fccno) {
1467
    case 0:
1468
        gen_helper_fcmped();
1469
        break;
1470
    case 1:
1471
        gen_helper_fcmped_fcc1();
1472
        break;
1473
    case 2:
1474
        gen_helper_fcmped_fcc2();
1475
        break;
1476
    case 3:
1477
        gen_helper_fcmped_fcc3();
1478
        break;
1479
    }
1480
}
1481

    
1482
static inline void gen_op_fcmpeq(int fccno)
1483
{
1484
    switch (fccno) {
1485
    case 0:
1486
        gen_helper_fcmpeq();
1487
        break;
1488
    case 1:
1489
        gen_helper_fcmpeq_fcc1();
1490
        break;
1491
    case 2:
1492
        gen_helper_fcmpeq_fcc2();
1493
        break;
1494
    case 3:
1495
        gen_helper_fcmpeq_fcc3();
1496
        break;
1497
    }
1498
}
1499

    
1500
#else
1501

    
1502
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1503
{
1504
    gen_helper_fcmps(r_rs1, r_rs2);
1505
}
1506

    
1507
static inline void gen_op_fcmpd(int fccno)
1508
{
1509
    gen_helper_fcmpd();
1510
}
1511

    
1512
static inline void gen_op_fcmpq(int fccno)
1513
{
1514
    gen_helper_fcmpq();
1515
}
1516

    
1517
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1518
{
1519
    gen_helper_fcmpes(r_rs1, r_rs2);
1520
}
1521

    
1522
static inline void gen_op_fcmped(int fccno)
1523
{
1524
    gen_helper_fcmped();
1525
}
1526

    
1527
static inline void gen_op_fcmpeq(int fccno)
1528
{
1529
    gen_helper_fcmpeq();
1530
}
1531
#endif
1532

    
1533
static inline void gen_op_fpexception_im(int fsr_flags)
1534
{
1535
    TCGv_i32 r_const;
1536

    
1537
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1538
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1539
    r_const = tcg_const_i32(TT_FP_EXCP);
1540
    gen_helper_raise_exception(r_const);
1541
    tcg_temp_free_i32(r_const);
1542
}
1543

    
1544
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1545
{
1546
#if !defined(CONFIG_USER_ONLY)
1547
    if (!dc->fpu_enabled) {
1548
        TCGv_i32 r_const;
1549

    
1550
        save_state(dc, r_cond);
1551
        r_const = tcg_const_i32(TT_NFPU_INSN);
1552
        gen_helper_raise_exception(r_const);
1553
        tcg_temp_free_i32(r_const);
1554
        dc->is_br = 1;
1555
        return 1;
1556
    }
1557
#endif
1558
    return 0;
1559
}
1560

    
1561
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1562
{
1563
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1564
}
1565

    
1566
static inline void gen_clear_float_exceptions(void)
1567
{
1568
    gen_helper_clear_float_exceptions();
1569
}
1570

    
1571
/* asi moves */
1572
#ifdef TARGET_SPARC64
1573
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1574
{
1575
    int asi;
1576
    TCGv_i32 r_asi;
1577

    
1578
    if (IS_IMM) {
1579
        r_asi = tcg_temp_new_i32();
1580
        tcg_gen_mov_i32(r_asi, cpu_asi);
1581
    } else {
1582
        asi = GET_FIELD(insn, 19, 26);
1583
        r_asi = tcg_const_i32(asi);
1584
    }
1585
    return r_asi;
1586
}
1587

    
1588
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1589
                              int sign)
1590
{
1591
    TCGv_i32 r_asi, r_size, r_sign;
1592

    
1593
    r_asi = gen_get_asi(insn, addr);
1594
    r_size = tcg_const_i32(size);
1595
    r_sign = tcg_const_i32(sign);
1596
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1597
    tcg_temp_free_i32(r_sign);
1598
    tcg_temp_free_i32(r_size);
1599
    tcg_temp_free_i32(r_asi);
1600
}
1601

    
1602
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1603
{
1604
    TCGv_i32 r_asi, r_size;
1605

    
1606
    r_asi = gen_get_asi(insn, addr);
1607
    r_size = tcg_const_i32(size);
1608
    gen_helper_st_asi(addr, src, r_asi, r_size);
1609
    tcg_temp_free_i32(r_size);
1610
    tcg_temp_free_i32(r_asi);
1611
}
1612

    
1613
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1614
{
1615
    TCGv_i32 r_asi, r_size, r_rd;
1616

    
1617
    r_asi = gen_get_asi(insn, addr);
1618
    r_size = tcg_const_i32(size);
1619
    r_rd = tcg_const_i32(rd);
1620
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1621
    tcg_temp_free_i32(r_rd);
1622
    tcg_temp_free_i32(r_size);
1623
    tcg_temp_free_i32(r_asi);
1624
}
1625

    
1626
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1627
{
1628
    TCGv_i32 r_asi, r_size, r_rd;
1629

    
1630
    r_asi = gen_get_asi(insn, addr);
1631
    r_size = tcg_const_i32(size);
1632
    r_rd = tcg_const_i32(rd);
1633
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1634
    tcg_temp_free_i32(r_rd);
1635
    tcg_temp_free_i32(r_size);
1636
    tcg_temp_free_i32(r_asi);
1637
}
1638

    
1639
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1640
{
1641
    TCGv_i32 r_asi, r_size, r_sign;
1642

    
1643
    r_asi = gen_get_asi(insn, addr);
1644
    r_size = tcg_const_i32(4);
1645
    r_sign = tcg_const_i32(0);
1646
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1647
    tcg_temp_free_i32(r_sign);
1648
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1649
    tcg_temp_free_i32(r_size);
1650
    tcg_temp_free_i32(r_asi);
1651
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1652
}
1653

    
1654
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1655
{
1656
    TCGv_i32 r_asi, r_rd;
1657

    
1658
    r_asi = gen_get_asi(insn, addr);
1659
    r_rd = tcg_const_i32(rd);
1660
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1661
    tcg_temp_free_i32(r_rd);
1662
    tcg_temp_free_i32(r_asi);
1663
}
1664

    
1665
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1666
{
1667
    TCGv_i32 r_asi, r_size;
1668

    
1669
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1670
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1671
    r_asi = gen_get_asi(insn, addr);
1672
    r_size = tcg_const_i32(8);
1673
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1674
    tcg_temp_free_i32(r_size);
1675
    tcg_temp_free_i32(r_asi);
1676
}
1677

    
1678
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1679
                               int rd)
1680
{
1681
    TCGv r_val1;
1682
    TCGv_i32 r_asi;
1683

    
1684
    r_val1 = tcg_temp_new();
1685
    gen_movl_reg_TN(rd, r_val1);
1686
    r_asi = gen_get_asi(insn, addr);
1687
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1688
    tcg_temp_free_i32(r_asi);
1689
    tcg_temp_free(r_val1);
1690
}
1691

    
1692
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1693
                                int rd)
1694
{
1695
    TCGv_i32 r_asi;
1696

    
1697
    gen_movl_reg_TN(rd, cpu_tmp64);
1698
    r_asi = gen_get_asi(insn, addr);
1699
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1700
    tcg_temp_free_i32(r_asi);
1701
}
1702

    
1703
#elif !defined(CONFIG_USER_ONLY)
1704

    
1705
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1706
                              int sign)
1707
{
1708
    TCGv_i32 r_asi, r_size, r_sign;
1709

    
1710
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1711
    r_size = tcg_const_i32(size);
1712
    r_sign = tcg_const_i32(sign);
1713
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1714
    tcg_temp_free(r_sign);
1715
    tcg_temp_free(r_size);
1716
    tcg_temp_free(r_asi);
1717
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1718
}
1719

    
1720
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1721
{
1722
    TCGv_i32 r_asi, r_size;
1723

    
1724
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1725
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1726
    r_size = tcg_const_i32(size);
1727
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1728
    tcg_temp_free(r_size);
1729
    tcg_temp_free(r_asi);
1730
}
1731

    
1732
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1733
{
1734
    TCGv_i32 r_asi, r_size, r_sign;
1735
    TCGv_i64 r_val;
1736

    
1737
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1738
    r_size = tcg_const_i32(4);
1739
    r_sign = tcg_const_i32(0);
1740
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1741
    tcg_temp_free(r_sign);
1742
    r_val = tcg_temp_new_i64();
1743
    tcg_gen_extu_tl_i64(r_val, dst);
1744
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1745
    tcg_temp_free_i64(r_val);
1746
    tcg_temp_free(r_size);
1747
    tcg_temp_free(r_asi);
1748
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1749
}
1750

    
1751
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1752
{
1753
    TCGv_i32 r_asi, r_size, r_sign;
1754

    
1755
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1756
    r_size = tcg_const_i32(8);
1757
    r_sign = tcg_const_i32(0);
1758
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1759
    tcg_temp_free(r_sign);
1760
    tcg_temp_free(r_size);
1761
    tcg_temp_free(r_asi);
1762
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1763
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1764
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1765
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1766
    gen_movl_TN_reg(rd, hi);
1767
}
1768

    
1769
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1770
{
1771
    TCGv_i32 r_asi, r_size;
1772

    
1773
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1774
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1775
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1776
    r_size = tcg_const_i32(8);
1777
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1778
    tcg_temp_free(r_size);
1779
    tcg_temp_free(r_asi);
1780
}
1781
#endif
1782

    
1783
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1784
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1785
{
1786
    TCGv_i64 r_val;
1787
    TCGv_i32 r_asi, r_size;
1788

    
1789
    gen_ld_asi(dst, addr, insn, 1, 0);
1790

    
1791
    r_val = tcg_const_i64(0xffULL);
1792
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1793
    r_size = tcg_const_i32(1);
1794
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1795
    tcg_temp_free_i32(r_size);
1796
    tcg_temp_free_i32(r_asi);
1797
    tcg_temp_free_i64(r_val);
1798
}
1799
#endif
1800

    
1801
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}

#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;

/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

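    /* insn[31:30] (GET_FIELD bits 0..1) selects the major instruction
       format: 0 = branches/SETHI, 1 = CALL, 2 = arithmetic/FPU/control,
       3 = loads and stores.  */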
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
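/* The FMOV*CC helpers below all expand to the same pattern: evaluate the
   condition into a temporary, branch over the register copy when it is
   false, and fall through to a plain fpr-to-fpr move when it is true.  */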
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
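            /* V9 shift group: insn bit 12 ("X") selects the 64-bit form with
               a 6-bit shift count; otherwise the count is masked to 5 bits
               and srl/sra first truncate/sign-extend the source to 32 bits. */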
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
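                    /* Bit 4 of xop (0x10) selects the "cc" form of each ALU
                       op below, which also updates the condition codes. */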
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        {
                            TCGv r_temp1, r_temp2;
                            r_temp1 = tcg_temp_local_new();
                            r_temp2 = tcg_temp_local_new();
                            tcg_gen_mov_tl(r_temp1, cpu_src1);
                            tcg_gen_mov_tl(r_temp2, cpu_src2);
                            gen_trap_ifdivzero_tl(r_temp2);
                            tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
                            tcg_temp_free(r_temp1);
                            tcg_temp_free(r_temp2);
                        }
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr();
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
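                    /* wr %y / wr %asr: the value written is rs1 XOR (rs2 or
                       simm13), per the SPARC wr definition; rd selects the
                       ancillary state register. */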
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
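                            /* V9 wrpr: rd selects the privileged register
                               being written; cpu_tmp0 holds rs1 XOR
                               (rs2 or simm13). */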
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    gen_helper_wrpstate(r_tmp);
                                    tcg_temp_free(r_tmp);
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
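                    /* V9 movcc: insn bit 18 selects the integer condition
                       codes (cc field 0 = %icc, 2 = %xcc) rather than
                       %fcc0-%fcc3. */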
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break; /* do not fall through into movr */
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                              cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
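                /* VIS ops: double-precision operands are staged through the
                   DT0/DT1 helper temporaries; single-precision forms operate
                   on cpu_fpr[] directly. */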
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
3982
                    CHECK_FPU_FEATURE(dc, VIS1);
3983
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3984
                                    cpu_fpr[DFPREG(rs2)]);
3985
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3986
                                    cpu_fpr[DFPREG(rs2) + 1]);
3987
                    break;
3988
                case 0x063: /* VIS I fnors */
3989
                    CHECK_FPU_FEATURE(dc, VIS1);
3990
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3991
                    break;
3992
                case 0x064: /* VIS I fandnot2 */
3993
                    CHECK_FPU_FEATURE(dc, VIS1);
3994
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3995
                                     cpu_fpr[DFPREG(rs2)]);
3996
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3997
                                     cpu_fpr[DFPREG(rs1) + 1],
3998
                                     cpu_fpr[DFPREG(rs2) + 1]);
3999
                    break;
4000
                case 0x065: /* VIS I fandnot2s */
4001
                    CHECK_FPU_FEATURE(dc, VIS1);
4002
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4003
                    break;
4004
                case 0x066: /* VIS I fnot2 */
4005
                    CHECK_FPU_FEATURE(dc, VIS1);
4006
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4007
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4008
                                    cpu_fpr[DFPREG(rs2) + 1]);
4009
                    break;
4010
                case 0x067: /* VIS I fnot2s */
4011
                    CHECK_FPU_FEATURE(dc, VIS1);
4012
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4013
                    break;
4014
                case 0x068: /* VIS I fandnot1 */
4015
                    CHECK_FPU_FEATURE(dc, VIS1);
4016
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4017
                                     cpu_fpr[DFPREG(rs1)]);
4018
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4019
                                     cpu_fpr[DFPREG(rs2) + 1],
4020
                                     cpu_fpr[DFPREG(rs1) + 1]);
4021
                    break;
4022
                case 0x069: /* VIS I fandnot1s */
4023
                    CHECK_FPU_FEATURE(dc, VIS1);
4024
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4025
                    break;
4026
                case 0x06a: /* VIS I fnot1 */
4027
                    CHECK_FPU_FEATURE(dc, VIS1);
4028
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4029
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4030
                                    cpu_fpr[DFPREG(rs1) + 1]);
4031
                    break;
4032
                case 0x06b: /* VIS I fnot1s */
4033
                    CHECK_FPU_FEATURE(dc, VIS1);
4034
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4035
                    break;
4036
                case 0x06c: /* VIS I fxor */
4037
                    CHECK_FPU_FEATURE(dc, VIS1);
4038
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4039
                                    cpu_fpr[DFPREG(rs2)]);
4040
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4041
                                    cpu_fpr[DFPREG(rs1) + 1],
4042
                                    cpu_fpr[DFPREG(rs2) + 1]);
4043
                    break;
4044
                case 0x06d: /* VIS I fxors */
4045
                    CHECK_FPU_FEATURE(dc, VIS1);
4046
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4047
                    break;
4048
                case 0x06e: /* VIS I fnand */
4049
                    CHECK_FPU_FEATURE(dc, VIS1);
4050
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4051
                                     cpu_fpr[DFPREG(rs2)]);
4052
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
4053
                                     cpu_fpr[DFPREG(rs2) + 1]);
4054
                    break;
4055
                case 0x06f: /* VIS I fnands */
4056
                    CHECK_FPU_FEATURE(dc, VIS1);
4057
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4058
                    break;
4059
                case 0x070: /* VIS I fand */
4060
                    CHECK_FPU_FEATURE(dc, VIS1);
4061
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4062
                                    cpu_fpr[DFPREG(rs2)]);
4063
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4064
                                    cpu_fpr[DFPREG(rs1) + 1],
4065
                                    cpu_fpr[DFPREG(rs2) + 1]);
4066
                    break;
4067
                case 0x071: /* VIS I fands */
4068
                    CHECK_FPU_FEATURE(dc, VIS1);
4069
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4070
                    break;
4071
                case 0x072: /* VIS I fxnor */
4072
                    CHECK_FPU_FEATURE(dc, VIS1);
4073
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4074
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4075
                                    cpu_fpr[DFPREG(rs1)]);
4076
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4077
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4078
                                    cpu_fpr[DFPREG(rs1) + 1]);
4079
                    break;
4080
                case 0x073: /* VIS I fxnors */
4081
                    CHECK_FPU_FEATURE(dc, VIS1);
4082
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4083
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4084
                    break;
4085
                case 0x074: /* VIS I fsrc1 */
4086
                    CHECK_FPU_FEATURE(dc, VIS1);
4087
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4088
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4089
                                    cpu_fpr[DFPREG(rs1) + 1]);
4090
                    break;
4091
                case 0x075: /* VIS I fsrc1s */
4092
                    CHECK_FPU_FEATURE(dc, VIS1);
4093
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4094
                    break;
4095
                case 0x076: /* VIS I fornot2 */
4096
                    CHECK_FPU_FEATURE(dc, VIS1);
4097
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4098
                                    cpu_fpr[DFPREG(rs2)]);
4099
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4100
                                    cpu_fpr[DFPREG(rs1) + 1],
4101
                                    cpu_fpr[DFPREG(rs2) + 1]);
4102
                    break;
4103
                case 0x077: /* VIS I fornot2s */
4104
                    CHECK_FPU_FEATURE(dc, VIS1);
4105
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4106
                    break;
4107
                case 0x078: /* VIS I fsrc2 */
4108
                    CHECK_FPU_FEATURE(dc, VIS1);
4109
                    gen_op_load_fpr_DT0(DFPREG(rs2));
4110
                    gen_op_store_DT0_fpr(DFPREG(rd));
4111
                    break;
4112
                case 0x079: /* VIS I fsrc2s */
4113
                    CHECK_FPU_FEATURE(dc, VIS1);
4114
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4115
                    break;
4116
                case 0x07a: /* VIS I fornot1 */
4117
                    CHECK_FPU_FEATURE(dc, VIS1);
4118
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4119
                                    cpu_fpr[DFPREG(rs1)]);
4120
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4121
                                    cpu_fpr[DFPREG(rs2) + 1],
4122
                                    cpu_fpr[DFPREG(rs1) + 1]);
4123
                    break;
4124
                case 0x07b: /* VIS I fornot1s */
4125
                    CHECK_FPU_FEATURE(dc, VIS1);
4126
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4127
                    break;
4128
                case 0x07c: /* VIS I for */
4129
                    CHECK_FPU_FEATURE(dc, VIS1);
4130
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4131
                                   cpu_fpr[DFPREG(rs2)]);
4132
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4133
                                   cpu_fpr[DFPREG(rs1) + 1],
4134
                                   cpu_fpr[DFPREG(rs2) + 1]);
4135
                    break;
4136
                case 0x07d: /* VIS I fors */
4137
                    CHECK_FPU_FEATURE(dc, VIS1);
4138
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4139
                    break;
4140
                case 0x07e: /* VIS I fone */
4141
                    CHECK_FPU_FEATURE(dc, VIS1);
4142
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4143
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4144
                    break;
4145
                case 0x07f: /* VIS I fones */
4146
                    CHECK_FPU_FEATURE(dc, VIS1);
4147
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
4148
                    break;
4149
                case 0x080: /* VIS I shutdown */
4150
                case 0x081: /* VIS II siam */
4151
                    // XXX
4152
                    goto illegal_insn;
4153
                default:
4154
                    goto illegal_insn;
4155
                }
4156
#else
4157
                goto ncp_insn;
4158
#endif
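            /* xop 0x37 is CPop2 on V8 and impdep2 on V9: with no coprocessor
               emulated the V8 case raises the TT_NCP_INSN trap, while V9
               treats it as an illegal instruction. */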
4159
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4160
#ifdef TARGET_SPARC64
4161
                goto illegal_insn;
4162
#else
4163
                goto ncp_insn;
4164
#endif
4165
#ifdef TARGET_SPARC64
4166
            } else if (xop == 0x39) { /* V9 return */
4167
                TCGv_i32 r_const;
4168

    
4169
                save_state(dc, cpu_cond);
4170
                cpu_src1 = get_src1(insn, cpu_src1);
4171
                if (IS_IMM) {   /* immediate */
4172
                    simm = GET_FIELDs(insn, 19, 31);
4173
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4174
                } else {                /* register */
4175
                    rs2 = GET_FIELD(insn, 27, 31);
4176
                    if (rs2) {
4177
                        gen_movl_reg_TN(rs2, cpu_src2);
4178
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4179
                    } else
4180
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4181
                }
4182
                gen_helper_restore();
4183
                gen_mov_pc_npc(dc, cpu_cond);
4184
                r_const = tcg_const_i32(3);
4185
                gen_helper_check_align(cpu_dst, r_const);
4186
                tcg_temp_free_i32(r_const);
4187
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
4188
                dc->npc = DYNAMIC_PC;
4189
                goto jmp_insn;
4190
#endif
4191
            } else {
4192
                cpu_src1 = get_src1(insn, cpu_src1);
4193
                if (IS_IMM) {   /* immediate */
4194
                    simm = GET_FIELDs(insn, 19, 31);
4195
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4196
                } else {                /* register */
4197
                    rs2 = GET_FIELD(insn, 27, 31);
4198
                    if (rs2) {
4199
                        gen_movl_reg_TN(rs2, cpu_src2);
4200
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4201
                    } else
4202
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
4203
                }
4204
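                /* cpu_dst now holds rs1 + simm13 or rs1 + rs2: the target
                   address for jmpl/rett below, and the value written to rd
                   of the new (save) or previous (restore) window.  flush and
                   done/retry ignore it. */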
                switch (xop) {
4205
                case 0x38:      /* jmpl */
4206
                    {
4207
                        TCGv r_pc;
4208
                        TCGv_i32 r_const;
4209

    
4210
                        r_pc = tcg_const_tl(dc->pc);
4211
                        gen_movl_TN_reg(rd, r_pc);
4212
                        tcg_temp_free(r_pc);
4213
                        gen_mov_pc_npc(dc, cpu_cond);
4214
                        r_const = tcg_const_i32(3);
4215
                        gen_helper_check_align(cpu_dst, r_const);
4216
                        tcg_temp_free_i32(r_const);
4217
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
4218
                        dc->npc = DYNAMIC_PC;
4219
                    }
4220
                    goto jmp_insn;
4221
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4222
                case 0x39:      /* rett, V9 return */
4223
                    {
4224
                        TCGv_i32 r_const;
4225

    
4226
                        if (!supervisor(dc))
4227
                            goto priv_insn;
4228
                        gen_mov_pc_npc(dc, cpu_cond);
4229
                        r_const = tcg_const_i32(3);
4230
                        gen_helper_check_align(cpu_dst, r_const);
4231
                        tcg_temp_free_i32(r_const);
4232
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
4233
                        dc->npc = DYNAMIC_PC;
4234
                        gen_helper_rett();
4235
                    }
4236
                    goto jmp_insn;
4237
#endif
4238
                case 0x3b: /* flush */
4239
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4240
                        goto unimp_flush;
4241
                    /* nop */
4242
                    break;
4243
                case 0x3c:      /* save */
4244
                    save_state(dc, cpu_cond);
4245
                    gen_helper_save();
4246
                    gen_movl_TN_reg(rd, cpu_dst);
4247
                    break;
4248
                case 0x3d:      /* restore */
4249
                    save_state(dc, cpu_cond);
4250
                    gen_helper_restore();
4251
                    gen_movl_TN_reg(rd, cpu_dst);
4252
                    break;
4253
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4254
                case 0x3e:      /* V9 done/retry */
4255
                    {
4256
                        switch (rd) {
4257
                        case 0:
4258
                            if (!supervisor(dc))
4259
                                goto priv_insn;
4260
                            dc->npc = DYNAMIC_PC;
4261
                            dc->pc = DYNAMIC_PC;
4262
                            gen_helper_done();
4263
                            goto jmp_insn;
4264
                        case 1:
4265
                            if (!supervisor(dc))
4266
                                goto priv_insn;
4267
                            dc->npc = DYNAMIC_PC;
4268
                            dc->pc = DYNAMIC_PC;
4269
                            gen_helper_retry();
4270
                            goto jmp_insn;
4271
                        default:
4272
                            goto illegal_insn;
4273
                        }
4274
                    }
4275
                    break;
4276
#endif
4277
                default:
4278
                    goto illegal_insn;
4279
                }
4280
            }
4281
            break;
4282
        }
4283
        break;
4284
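    /* Format 3 memory instructions.  The effective address is rs1 + simm13
       (immediate form) or rs1 + rs2 (register form); casa/casxa instead take
       the address from rs1 alone and use rs2 as the comparison value. */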
    case 3:                     /* load/store instructions */
4285
        {
4286
            unsigned int xop = GET_FIELD(insn, 7, 12);
4287

    
4288
            /* flush pending conditional evaluations before exposing
4289
               cpu state */
4290
            if (dc->cc_op != CC_OP_FLAGS) {
4291
                dc->cc_op = CC_OP_FLAGS;
4292
                gen_helper_compute_psr();
4293
            }
4294
            cpu_src1 = get_src1(insn, cpu_src1);
4295
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4296
                rs2 = GET_FIELD(insn, 27, 31);
4297
                gen_movl_reg_TN(rs2, cpu_src2);
4298
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
4299
            } else if (IS_IMM) {     /* immediate */
4300
                simm = GET_FIELDs(insn, 19, 31);
4301
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4302
            } else {            /* register */
4303
                rs2 = GET_FIELD(insn, 27, 31);
4304
                if (rs2 != 0) {
4305
                    gen_movl_reg_TN(rs2, cpu_src2);
4306
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4307
                } else
4308
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
4309
            }
4310
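            /* Dispatch on xop: the first group below covers the integer
               loads (normal and alternate space; the V9 FP/prefetch forms in
               it bypass the integer write-back via skip_move), 0x20-0x23 the
               FP/FSR loads, the following group the integer stores, 0x24-0x27
               the FP/FSR stores, and 0x34-0x3e the alternate-space FP stores
               and casa/casxa. */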
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4311
                (xop > 0x17 && xop <= 0x1d ) ||
4312
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4313
                switch (xop) {
4314
                case 0x0:       /* ld, V9 lduw, load unsigned word */
4315
                    gen_address_mask(dc, cpu_addr);
4316
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4317
                    break;
4318
                case 0x1:       /* ldub, load unsigned byte */
4319
                    gen_address_mask(dc, cpu_addr);
4320
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4321
                    break;
4322
                case 0x2:       /* lduh, load unsigned halfword */
4323
                    gen_address_mask(dc, cpu_addr);
4324
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4325
                    break;
4326
                case 0x3:       /* ldd, load double word */
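                    /* One 64-bit load, then split: the word at the lower
                       (big-endian) address ends up in the even register rd
                       via cpu_val and the common write-back after this
                       switch, the word at address + 4 in rd + 1. */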
4327
                    if (rd & 1)
4328
                        goto illegal_insn;
4329
                    else {
4330
                        TCGv_i32 r_const;
4331

    
4332
                        save_state(dc, cpu_cond);
4333
                        r_const = tcg_const_i32(7);
4334
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
4335
                        tcg_temp_free_i32(r_const);
4336
                        gen_address_mask(dc, cpu_addr);
4337
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4338
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4339
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4340
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
4341
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4342
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4343
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4344
                    }
4345
                    break;
4346
                case 0x9:       /* ldsb, load signed byte */
4347
                    gen_address_mask(dc, cpu_addr);
4348
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4349
                    break;
4350
                case 0xa:       /* ldsh, load signed halfword */
4351
                    gen_address_mask(dc, cpu_addr);
4352
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4353
                    break;
4354
                case 0xd:       /* ldstub -- XXX: should be atomic */
4355
                    {
4356
                        TCGv r_const;
4357

    
4358
                        gen_address_mask(dc, cpu_addr);
4359
                        tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4360
                        r_const = tcg_const_tl(0xff);
4361
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4362
                        tcg_temp_free(r_const);
4363
                    }
4364
                    break;
4365
                case 0x0f:      /* swap, swap register with memory. Also
4366
                                   atomically */
4367
                    CHECK_IU_FEATURE(dc, SWAP);
4368
                    gen_movl_reg_TN(rd, cpu_val);
4369
                    gen_address_mask(dc, cpu_addr);
4370
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4371
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4372
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4373
                    break;
4374
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4375
                case 0x10:      /* lda, V9 lduwa, load word alternate */
4376
#ifndef TARGET_SPARC64
4377
                    if (IS_IMM)
4378
                        goto illegal_insn;
4379
                    if (!supervisor(dc))
4380
                        goto priv_insn;
4381
#endif
4382
                    save_state(dc, cpu_cond);
4383
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4384
                    break;
4385
                case 0x11:      /* lduba, load unsigned byte alternate */
4386
#ifndef TARGET_SPARC64
4387
                    if (IS_IMM)
4388
                        goto illegal_insn;
4389
                    if (!supervisor(dc))
4390
                        goto priv_insn;
4391
#endif
4392
                    save_state(dc, cpu_cond);
4393
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4394
                    break;
4395
                case 0x12:      /* lduha, load unsigned halfword alternate */
4396
#ifndef TARGET_SPARC64
4397
                    if (IS_IMM)
4398
                        goto illegal_insn;
4399
                    if (!supervisor(dc))
4400
                        goto priv_insn;
4401
#endif
4402
                    save_state(dc, cpu_cond);
4403
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4404
                    break;
4405
                case 0x13:      /* ldda, load double word alternate */
4406
#ifndef TARGET_SPARC64
4407
                    if (IS_IMM)
4408
                        goto illegal_insn;
4409
                    if (!supervisor(dc))
4410
                        goto priv_insn;
4411
#endif
4412
                    if (rd & 1)
4413
                        goto illegal_insn;
4414
                    save_state(dc, cpu_cond);
4415
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4416
                    goto skip_move;
4417
                case 0x19:      /* ldsba, load signed byte alternate */
4418
#ifndef TARGET_SPARC64
4419
                    if (IS_IMM)
4420
                        goto illegal_insn;
4421
                    if (!supervisor(dc))
4422
                        goto priv_insn;
4423
#endif
4424
                    save_state(dc, cpu_cond);
4425
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4426
                    break;
4427
                case 0x1a:      /* ldsha, load signed halfword alternate */
4428
#ifndef TARGET_SPARC64
4429
                    if (IS_IMM)
4430
                        goto illegal_insn;
4431
                    if (!supervisor(dc))
4432
                        goto priv_insn;
4433
#endif
4434
                    save_state(dc, cpu_cond);
4435
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4436
                    break;
4437
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
4438
#ifndef TARGET_SPARC64
4439
                    if (IS_IMM)
4440
                        goto illegal_insn;
4441
                    if (!supervisor(dc))
4442
                        goto priv_insn;
4443
#endif
4444
                    save_state(dc, cpu_cond);
4445
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
4446
                    break;
4447
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
4448
                                   atomically */
4449
                    CHECK_IU_FEATURE(dc, SWAP);
4450
#ifndef TARGET_SPARC64
4451
                    if (IS_IMM)
4452
                        goto illegal_insn;
4453
                    if (!supervisor(dc))
4454
                        goto priv_insn;
4455
#endif
4456
                    save_state(dc, cpu_cond);
4457
                    gen_movl_reg_TN(rd, cpu_val);
4458
                    gen_swap_asi(cpu_val, cpu_addr, insn);
4459
                    break;
4460

    
4461
#ifndef TARGET_SPARC64
4462
                case 0x30: /* ldc */
4463
                case 0x31: /* ldcsr */
4464
                case 0x33: /* lddc */
4465
                    goto ncp_insn;
4466
#endif
4467
#endif
4468
#ifdef TARGET_SPARC64
4469
                case 0x08: /* V9 ldsw */
4470
                    gen_address_mask(dc, cpu_addr);
4471
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4472
                    break;
4473
                case 0x0b: /* V9 ldx */
4474
                    gen_address_mask(dc, cpu_addr);
4475
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4476
                    break;
4477
                case 0x18: /* V9 ldswa */
4478
                    save_state(dc, cpu_cond);
4479
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4480
                    break;
4481
                case 0x1b: /* V9 ldxa */
4482
                    save_state(dc, cpu_cond);
4483
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4484
                    break;
4485
                case 0x2d: /* V9 prefetch, no effect */
4486
                    goto skip_move;
4487
                case 0x30: /* V9 ldfa */
4488
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4489
                        goto jmp_insn;
4490
                    }
4491
                    save_state(dc, cpu_cond);
4492
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
4493
                    goto skip_move;
4494
                case 0x33: /* V9 lddfa */
4495
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4496
                        goto jmp_insn;
4497
                    }
4498
                    save_state(dc, cpu_cond);
4499
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4500
                    goto skip_move;
4501
                case 0x3d: /* V9 prefetcha, no effect */
4502
                    goto skip_move;
4503
                case 0x32: /* V9 ldqfa */
4504
                    CHECK_FPU_FEATURE(dc, FLOAT128);
4505
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4506
                        goto jmp_insn;
4507
                    }
4508
                    save_state(dc, cpu_cond);
4509
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4510
                    goto skip_move;
4511
#endif
4512
                default:
4513
                    goto illegal_insn;
4514
                }
4515
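                /* Common write-back of the loaded value; ASI and FP loads
                   that already stored their result jump to skip_move. */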
                gen_movl_TN_reg(rd, cpu_val);
4516
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4517
            skip_move: ;
4518
#endif
4519
            } else if (xop >= 0x20 && xop < 0x24) {
4520
                if (gen_trap_ifnofpu(dc, cpu_cond))
4521
                    goto jmp_insn;
4522
                save_state(dc, cpu_cond);
4523
                switch (xop) {
4524
                case 0x20:      /* ldf, load fpreg */
4525
                    gen_address_mask(dc, cpu_addr);
4526
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4527
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4528
                    break;
4529
                case 0x21:      /* ldfsr, V9 ldxfsr */
4530
#ifdef TARGET_SPARC64
4531
                    gen_address_mask(dc, cpu_addr);
4532
                    if (rd == 1) {
4533
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4534
                        gen_helper_ldxfsr(cpu_tmp64);
4535
                    } else {
4536
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4537
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4538
                        gen_helper_ldfsr(cpu_tmp32);
4539
                    }
4540
#else
4541
                    {
4542
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4543
                        gen_helper_ldfsr(cpu_tmp32);
4544
                    }
4545
#endif
4546
                    break;
4547
                case 0x22:      /* ldqf, load quad fpreg */
4548
                    {
4549
                        TCGv_i32 r_const;
4550

    
4551
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4552
                        r_const = tcg_const_i32(dc->mem_idx);
4553
                        gen_address_mask(dc, cpu_addr);
4554
                        gen_helper_ldqf(cpu_addr, r_const);
4555
                        tcg_temp_free_i32(r_const);
4556
                        gen_op_store_QT0_fpr(QFPREG(rd));
4557
                    }
4558
                    break;
4559
                case 0x23:      /* lddf, load double fpreg */
4560
                    {
4561
                        TCGv_i32 r_const;
4562

    
4563
                        r_const = tcg_const_i32(dc->mem_idx);
4564
                        gen_address_mask(dc, cpu_addr);
4565
                        gen_helper_lddf(cpu_addr, r_const);
4566
                        tcg_temp_free_i32(r_const);
4567
                        gen_op_store_DT0_fpr(DFPREG(rd));
4568
                    }
4569
                    break;
4570
                default:
4571
                    goto illegal_insn;
4572
                }
4573
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4574
                       xop == 0xe || xop == 0x1e) {
4575
                gen_movl_reg_TN(rd, cpu_val);
4576
                switch (xop) {
4577
                case 0x4: /* st, store word */
4578
                    gen_address_mask(dc, cpu_addr);
4579
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4580
                    break;
4581
                case 0x5: /* stb, store byte */
4582
                    gen_address_mask(dc, cpu_addr);
4583
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4584
                    break;
4585
                case 0x6: /* sth, store halfword */
4586
                    gen_address_mask(dc, cpu_addr);
4587
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4588
                    break;
4589
                case 0x7: /* std, store double word */
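                    /* rd (high word) and rd + 1 (low word) are concatenated
                       and written with one 64-bit big-endian store, so rd
                       lands at the lower address. */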
4590
                    if (rd & 1)
4591
                        goto illegal_insn;
4592
                    else {
4593
                        TCGv_i32 r_const;
4594

    
4595
                        save_state(dc, cpu_cond);
4596
                        gen_address_mask(dc, cpu_addr);
4597
                        r_const = tcg_const_i32(7);
4598
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
4599
                        tcg_temp_free_i32(r_const);
4600
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
4601
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4602
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4603
                    }
4604
                    break;
4605
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4606
                case 0x14: /* sta, V9 stwa, store word alternate */
4607
#ifndef TARGET_SPARC64
4608
                    if (IS_IMM)
4609
                        goto illegal_insn;
4610
                    if (!supervisor(dc))
4611
                        goto priv_insn;
4612
#endif
4613
                    save_state(dc, cpu_cond);
4614
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
4615
                    dc->npc = DYNAMIC_PC;
4616
                    break;
4617
                case 0x15: /* stba, store byte alternate */
4618
#ifndef TARGET_SPARC64
4619
                    if (IS_IMM)
4620
                        goto illegal_insn;
4621
                    if (!supervisor(dc))
4622
                        goto priv_insn;
4623
#endif
4624
                    save_state(dc, cpu_cond);
4625
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
4626
                    dc->npc = DYNAMIC_PC;
4627
                    break;
4628
                case 0x16: /* stha, store halfword alternate */
4629
#ifndef TARGET_SPARC64
4630
                    if (IS_IMM)
4631
                        goto illegal_insn;
4632
                    if (!supervisor(dc))
4633
                        goto priv_insn;
4634
#endif
4635
                    save_state(dc, cpu_cond);
4636
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
4637
                    dc->npc = DYNAMIC_PC;
4638
                    break;
4639
                case 0x17: /* stda, store double word alternate */
4640
#ifndef TARGET_SPARC64
4641
                    if (IS_IMM)
4642
                        goto illegal_insn;
4643
                    if (!supervisor(dc))
4644
                        goto priv_insn;
4645
#endif
4646
                    if (rd & 1)
4647
                        goto illegal_insn;
4648
                    else {
4649
                        save_state(dc, cpu_cond);
4650
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4651
                    }
4652
                    break;
4653
#endif
4654
#ifdef TARGET_SPARC64
4655
                case 0x0e: /* V9 stx */
4656
                    gen_address_mask(dc, cpu_addr);
4657
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4658
                    break;
4659
                case 0x1e: /* V9 stxa */
4660
                    save_state(dc, cpu_cond);
4661
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
4662
                    dc->npc = DYNAMIC_PC;
4663
                    break;
4664
#endif
4665
                default:
4666
                    goto illegal_insn;
4667
                }
4668
            } else if (xop > 0x23 && xop < 0x28) {
4669
                if (gen_trap_ifnofpu(dc, cpu_cond))
4670
                    goto jmp_insn;
4671
                save_state(dc, cpu_cond);
4672
                switch (xop) {
4673
                case 0x24: /* stf, store fpreg */
4674
                    gen_address_mask(dc, cpu_addr);
4675
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4676
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4677
                    break;
4678
                case 0x25: /* stfsr, V9 stxfsr */
4679
#ifdef TARGET_SPARC64
4680
                    gen_address_mask(dc, cpu_addr);
4681
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4682
                    if (rd == 1)
4683
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4684
                    else
4685
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4686
#else
4687
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4688
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4689
#endif
4690
                    break;
4691
                case 0x26:
4692
#ifdef TARGET_SPARC64
4693
                    /* V9 stqf, store quad fpreg */
4694
                    {
4695
                        TCGv_i32 r_const;
4696

    
4697
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4698
                        gen_op_load_fpr_QT0(QFPREG(rd));
4699
                        r_const = tcg_const_i32(dc->mem_idx);
4700
                        gen_address_mask(dc, cpu_addr);
4701
                        gen_helper_stqf(cpu_addr, r_const);
4702
                        tcg_temp_free_i32(r_const);
4703
                    }
4704
                    break;
4705
#else /* !TARGET_SPARC64 */
4706
                    /* stdfq, store floating point queue */
4707
#if defined(CONFIG_USER_ONLY)
4708
                    goto illegal_insn;
4709
#else
4710
                    if (!supervisor(dc))
4711
                        goto priv_insn;
4712
                    if (gen_trap_ifnofpu(dc, cpu_cond))
4713
                        goto jmp_insn;
4714
                    goto nfq_insn;
4715
#endif
4716
#endif
4717
                case 0x27: /* stdf, store double fpreg */
4718
                    {
4719
                        TCGv_i32 r_const;
4720

    
4721
                        gen_op_load_fpr_DT0(DFPREG(rd));
4722
                        r_const = tcg_const_i32(dc->mem_idx);
4723
                        gen_address_mask(dc, cpu_addr);
4724
                        gen_helper_stdf(cpu_addr, r_const);
4725
                        tcg_temp_free_i32(r_const);
4726
                    }
4727
                    break;
4728
                default:
4729
                    goto illegal_insn;
4730
                }
4731
            } else if (xop > 0x33 && xop < 0x3f) {
4732
                save_state(dc, cpu_cond);
4733
                switch (xop) {
4734
#ifdef TARGET_SPARC64
4735
                case 0x34: /* V9 stfa */
4736
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4737
                        goto jmp_insn;
4738
                    }
4739
                    gen_stf_asi(cpu_addr, insn, 4, rd);
4740
                    break;
4741
                case 0x36: /* V9 stqfa */
4742
                    {
4743
                        TCGv_i32 r_const;
4744

    
4745
                        CHECK_FPU_FEATURE(dc, FLOAT128);
4746
                        if (gen_trap_ifnofpu(dc, cpu_cond)) {
4747
                            goto jmp_insn;
4748
                        }
4749
                        r_const = tcg_const_i32(7);
4750
                        gen_helper_check_align(cpu_addr, r_const);
4751
                        tcg_temp_free_i32(r_const);
4752
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4753
                    }
4754
                    break;
4755
                case 0x37: /* V9 stdfa */
4756
                    if (gen_trap_ifnofpu(dc, cpu_cond)) {
4757
                        goto jmp_insn;
4758
                    }
4759
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4760
                    break;
4761
                case 0x3c: /* V9 casa */
4762
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4763
                    gen_movl_TN_reg(rd, cpu_val);
4764
                    break;
4765
                case 0x3e: /* V9 casxa */
4766
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4767
                    gen_movl_TN_reg(rd, cpu_val);
4768
                    break;
4769
#else
4770
                case 0x34: /* stc */
4771
                case 0x35: /* stcsr */
4772
                case 0x36: /* stdcq */
4773
                case 0x37: /* stdc */
4774
                    goto ncp_insn;
4775
#endif
4776
                default:
4777
                    goto illegal_insn;
4778
                }
4779
            } else
4780
                goto illegal_insn;
4781
        }
4782
        break;
4783
    }
4784
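    /* Bookkeeping for the delay-slot pc/npc pair.  DYNAMIC_PC means the next
       pc is only known at run time (it already sits in cpu_npc); JUMP_PC
       means npc is one of dc->jump_pc[0..1], selected by cpu_cond, so a
       preceding conditional branch can still be emitted statically here. */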
    /* default case for non jump instructions */
4785
    if (dc->npc == DYNAMIC_PC) {
4786
        dc->pc = DYNAMIC_PC;
4787
        gen_op_next_insn();
4788
    } else if (dc->npc == JUMP_PC) {
4789
        /* we can do a static jump */
4790
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4791
        dc->is_br = 1;
4792
    } else {
4793
        dc->pc = dc->npc;
4794
        dc->npc = dc->npc + 4;
4795
    }
4796
 jmp_insn:
4797
    goto egress;
4798
 illegal_insn:
4799
    {
4800
        TCGv_i32 r_const;
4801

    
4802
        save_state(dc, cpu_cond);
4803
        r_const = tcg_const_i32(TT_ILL_INSN);
4804
        gen_helper_raise_exception(r_const);
4805
        tcg_temp_free_i32(r_const);
4806
        dc->is_br = 1;
4807
    }
4808
    goto egress;
4809
 unimp_flush:
4810
    {
4811
        TCGv_i32 r_const;
4812

    
4813
        save_state(dc, cpu_cond);
4814
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4815
        gen_helper_raise_exception(r_const);
4816
        tcg_temp_free_i32(r_const);
4817
        dc->is_br = 1;
4818
    }
4819
    goto egress;
4820
#if !defined(CONFIG_USER_ONLY)
4821
 priv_insn:
4822
    {
4823
        TCGv_i32 r_const;
4824

    
4825
        save_state(dc, cpu_cond);
4826
        r_const = tcg_const_i32(TT_PRIV_INSN);
4827
        gen_helper_raise_exception(r_const);
4828
        tcg_temp_free_i32(r_const);
4829
        dc->is_br = 1;
4830
    }
4831
    goto egress;
4832
#endif
4833
 nfpu_insn:
4834
    save_state(dc, cpu_cond);
4835
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4836
    dc->is_br = 1;
4837
    goto egress;
4838
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4839
 nfq_insn:
4840
    save_state(dc, cpu_cond);
4841
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4842
    dc->is_br = 1;
4843
    goto egress;
4844
#endif
4845
#ifndef TARGET_SPARC64
4846
 ncp_insn:
4847
    {
4848
        TCGv_i32 r_const;
4849

    
4850
        save_state(dc, cpu_cond);
4851
        r_const = tcg_const_i32(TT_NCP_INSN);
4852
        gen_helper_raise_exception(r_const);
4853
        tcg_temp_free_i32(r_const);
4854
        dc->is_br = 1;
4855
    }
4856
    goto egress;
4857
#endif
4858
 egress:
4859
    tcg_temp_free(cpu_tmp1);
4860
    tcg_temp_free(cpu_tmp2);
4861
}
4862

    
4863
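/* Translate one block starting at tb->pc.  Translation stops at a branch
   (dc->is_br), a non-sequential pc, a page boundary, single-step mode, or
   when the opcode buffer / max_insns limits are reached.  When spc is
   nonzero the pass also records the pc/npc of every instruction in the
   gen_opc_* arrays so that restore_state_to_opc() can rebuild the CPU
   state for a faulting instruction. */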
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4864
                                                  int spc, CPUSPARCState *env)
4865
{
4866
    target_ulong pc_start, last_pc;
4867
    uint16_t *gen_opc_end;
4868
    DisasContext dc1, *dc = &dc1;
4869
    CPUBreakpoint *bp;
4870
    int j, lj = -1;
4871
    int num_insns;
4872
    int max_insns;
4873

    
4874
    memset(dc, 0, sizeof(DisasContext));
4875
    dc->tb = tb;
4876
    pc_start = tb->pc;
4877
    dc->pc = pc_start;
4878
    last_pc = dc->pc;
4879
    dc->npc = (target_ulong) tb->cs_base;
4880
    dc->cc_op = CC_OP_DYNAMIC;
4881
    dc->mem_idx = cpu_mmu_index(env);
4882
    dc->def = env->def;
4883
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
4884
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
4885
    dc->singlestep = (env->singlestep_enabled || singlestep);
4886
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4887

    
4888
    cpu_tmp0 = tcg_temp_new();
4889
    cpu_tmp32 = tcg_temp_new_i32();
4890
    cpu_tmp64 = tcg_temp_new_i64();
4891

    
4892
    cpu_dst = tcg_temp_local_new();
4893

    
4894
    // loads and stores
4895
    cpu_val = tcg_temp_local_new();
4896
    cpu_addr = tcg_temp_local_new();
4897

    
4898
    num_insns = 0;
4899
    max_insns = tb->cflags & CF_COUNT_MASK;
4900
    if (max_insns == 0)
4901
        max_insns = CF_COUNT_MASK;
4902
    gen_icount_start();
4903
    do {
4904
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4905
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4906
                if (bp->pc == dc->pc) {
4907
                    if (dc->pc != pc_start)
4908
                        save_state(dc, cpu_cond);
4909
                    gen_helper_debug();
4910
                    tcg_gen_exit_tb(0);
4911
                    dc->is_br = 1;
4912
                    goto exit_gen_loop;
4913
                }
4914
            }
4915
        }
4916
        if (spc) {
4917
            qemu_log("Search PC...\n");
4918
            j = gen_opc_ptr - gen_opc_buf;
4919
            if (lj < j) {
4920
                lj++;
4921
                while (lj < j)
4922
                    gen_opc_instr_start[lj++] = 0;
4923
                gen_opc_pc[lj] = dc->pc;
4924
                gen_opc_npc[lj] = dc->npc;
4925
                gen_opc_instr_start[lj] = 1;
4926
                gen_opc_icount[lj] = num_insns;
4927
            }
4928
        }
4929
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4930
            gen_io_start();
4931
        last_pc = dc->pc;
4932
        disas_sparc_insn(dc);
4933
        num_insns++;
4934

    
4935
        if (dc->is_br)
4936
            break;
4937
        /* if the next PC is different, we abort now */
4938
        if (dc->pc != (last_pc + 4))
4939
            break;
4940
        /* if we reach a page boundary, we stop generation so that the
4941
           PC of a TT_TFAULT exception is always in the right page */
4942
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4943
            break;
4944
        /* if single step mode, we generate only one instruction and
4945
           generate an exception */
4946
        if (dc->singlestep) {
4947
            break;
4948
        }
4949
    } while ((gen_opc_ptr < gen_opc_end) &&
4950
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4951
             num_insns < max_insns);
4952

    
4953
 exit_gen_loop:
4954
    tcg_temp_free(cpu_addr);
4955
    tcg_temp_free(cpu_val);
4956
    tcg_temp_free(cpu_dst);
4957
    tcg_temp_free_i64(cpu_tmp64);
4958
    tcg_temp_free_i32(cpu_tmp32);
4959
    tcg_temp_free(cpu_tmp0);
4960
    if (tb->cflags & CF_LAST_IO)
4961
        gen_io_end();
4962
    if (!dc->is_br) {
4963
        if (dc->pc != DYNAMIC_PC &&
4964
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4965
            /* static PC and NPC: we can use direct chaining */
4966
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
4967
        } else {
4968
            if (dc->pc != DYNAMIC_PC)
4969
                tcg_gen_movi_tl(cpu_pc, dc->pc);
4970
            save_npc(dc, cpu_cond);
4971
            tcg_gen_exit_tb(0);
4972
        }
4973
    }
4974
    gen_icount_end(tb, num_insns);
4975
    *gen_opc_ptr = INDEX_op_end;
4976
    if (spc) {
4977
        j = gen_opc_ptr - gen_opc_buf;
4978
        lj++;
4979
        while (lj <= j)
4980
            gen_opc_instr_start[lj++] = 0;
4981
#if 0
4982
        log_page_dump();
4983
#endif
4984
        gen_opc_jump_pc[0] = dc->jump_pc[0];
4985
        gen_opc_jump_pc[1] = dc->jump_pc[1];
4986
    } else {
4987
        tb->size = last_pc + 4 - pc_start;
4988
        tb->icount = num_insns;
4989
    }
4990
#ifdef DEBUG_DISAS
4991
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4992
        qemu_log("--------------\n");
4993
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
4994
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4995
        qemu_log("\n");
4996
    }
4997
#endif
4998
}
4999

    
5000
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5001
{
5002
    gen_intermediate_code_internal(tb, 0, env);
5003
}
5004

    
5005
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5006
{
5007
    gen_intermediate_code_internal(tb, 1, env);
5008
}
5009

    
5010
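/* One-time registration of the TCG globals that mirror CPUState fields;
   %g0 gets no global (the loop starts at 1) because it always reads as
   zero on SPARC. */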
void gen_intermediate_code_init(CPUSPARCState *env)
5011
{
5012
    unsigned int i;
5013
    static int inited;
5014
    static const char * const gregnames[8] = {
5015
        NULL, // g0 not used
5016
        "g1",
5017
        "g2",
5018
        "g3",
5019
        "g4",
5020
        "g5",
5021
        "g6",
5022
        "g7",
5023
    };
5024
    static const char * const fregnames[64] = {
5025
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5026
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5027
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5028
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5029
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5030
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5031
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5032
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5033
    };
5034

    
5035
    /* init various static tables */
5036
    if (!inited) {
5037
        inited = 1;
5038

    
5039
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5040
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5041
                                             offsetof(CPUState, regwptr),
5042
                                             "regwptr");
5043
#ifdef TARGET_SPARC64
5044
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5045
                                         "xcc");
5046
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5047
                                         "asi");
5048
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5049
                                          "fprs");
5050
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5051
                                     "gsr");
5052
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5053
                                           offsetof(CPUState, tick_cmpr),
5054
                                           "tick_cmpr");
5055
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5056
                                            offsetof(CPUState, stick_cmpr),
5057
                                            "stick_cmpr");
5058
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5059
                                             offsetof(CPUState, hstick_cmpr),
5060
                                             "hstick_cmpr");
5061
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5062
                                       "hintp");
5063
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5064
                                      "htba");
5065
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5066
                                      "hver");
5067
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5068
                                     offsetof(CPUState, ssr), "ssr");
5069
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
5070
                                     offsetof(CPUState, version), "ver");
5071
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5072
                                             offsetof(CPUState, softint),
5073
                                             "softint");
5074
#else
5075
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5076
                                     "wim");
5077
#endif
5078
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5079
                                      "cond");
5080
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5081
                                        "cc_src");
5082
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5083
                                         offsetof(CPUState, cc_src2),
5084
                                         "cc_src2");
5085
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5086
                                        "cc_dst");
5087
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5088
                                           "cc_op");
5089
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5090
                                         "psr");
5091
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5092
                                     "fsr");
5093
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5094
                                    "pc");
5095
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5096
                                     "npc");
5097
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5098
#ifndef CONFIG_USER_ONLY
5099
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5100
                                     "tbr");
5101
#endif
5102
        for (i = 1; i < 8; i++)
5103
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5104
                                              offsetof(CPUState, gregs[i]),
5105
                                              gregnames[i]);
5106
        for (i = 0; i < TARGET_FPREGS; i++)
5107
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5108
                                                offsetof(CPUState, fpr[i]),
5109
                                                fregnames[i]);
5110

    
5111
        /* register helpers */
5112

    
5113
#define GEN_HELPER 2
5114
#include "helper.h"
5115
    }
5116
}
5117

    
5118
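/* Rebuild pc/npc for the instruction at pc_pos, as recorded by the
   search-pc pass above.  npc values 1 and 2 are the DYNAMIC_PC and JUMP_PC
   sentinels: for DYNAMIC_PC the real npc is already in the CPU state, for
   JUMP_PC it is picked from gen_opc_jump_pc[] according to env->cond. */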
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5119
{
5120
    target_ulong npc;
5121
    env->pc = gen_opc_pc[pc_pos];
5122
    npc = gen_opc_npc[pc_pos];
5123
    if (npc == 1) {
5124
        /* dynamic NPC: already stored */
5125
    } else if (npc == 2) {
5126
        /* jump PC: use 'cond' and the jump targets of the translation */
5127
        if (env->cond) {
5128
            env->npc = gen_opc_jump_pc[0];
5129
        } else {
5130
            env->npc = gen_opc_jump_pc[1];
5131
        }
5132
    } else {
5133
        env->npc = npc;
5134
    }
5135

    
5136
    /* flush pending conditional evaluations before exposing cpu state */
5137
    if (CC_OP != CC_OP_FLAGS) {
5138
        helper_compute_psr();
5139
    }
5140
}