target-sparc/translate.c @ bdf9f35d
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;

// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

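/* Integer register file access: %g0 always reads as zero and writes to it
   are discarded, the other globals live in the TCG globals cpu_gregs[],
   and the current window's %o/%l/%i registers are reached indirectly
   through cpu_regwptr. */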
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}

// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif

/* old op:
    if (!T0)
        env->psr |= PSR_ZERO;
    if ((int32_t) T0 < 0)
        env->psr |= PSR_NEG;
*/
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif

/* old op:
    if (T0 < src1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif

/* old op:
    if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

{
386
    TCGv r_temp;
387

    
388
    r_temp = tcg_temp_new();
389
    tcg_gen_xor_tl(r_temp, src1, src2);
390
    tcg_gen_not_tl(r_temp, r_temp);
391
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396
    tcg_temp_free(r_temp);
397
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398
}
399
#endif
400

    
401
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402
{
403
    TCGv r_temp;
404
    TCGv_i32 r_const;
405
    int l1;
406

    
407
    l1 = gen_new_label();
408

    
409
    r_temp = tcg_temp_new();
410
    tcg_gen_xor_tl(r_temp, src1, src2);
411
    tcg_gen_not_tl(r_temp, r_temp);
412
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416
    r_const = tcg_const_i32(TT_TOVF);
417
    gen_helper_raise_exception(r_const);
418
    tcg_temp_free_i32(r_const);
419
    gen_set_label(l1);
420
    tcg_temp_free(r_temp);
421
}
422

    
423
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
424
{
425
    int l1;
426

    
427
    l1 = gen_new_label();
428
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
429
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
432
    gen_set_label(l1);
433
}
434

    
435
static inline void gen_op_logic_cc(TCGv dst)
436
{
437
    tcg_gen_mov_tl(cpu_cc_dst, dst);
438

    
439
    gen_cc_clear_icc();
440
    gen_cc_NZ_icc(cpu_cc_dst);
441
#ifdef TARGET_SPARC64
442
    gen_cc_clear_xcc();
443
    gen_cc_NZ_xcc(cpu_cc_dst);
444
#endif
445
}
446

    
447
static inline void gen_tag_tv(TCGv src1, TCGv src2)
448
{
449
    int l1;
450
    TCGv_i32 r_const;
451

    
452
    l1 = gen_new_label();
453
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
454
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
455
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
456
    r_const = tcg_const_i32(TT_TOVF);
457
    gen_helper_raise_exception(r_const);
458
    tcg_temp_free_i32(r_const);
459
    gen_set_label(l1);
460
}
461

    
462
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
463
{
464
    tcg_gen_mov_tl(cpu_cc_src, src1);
465
    tcg_gen_movi_tl(cpu_cc_src2, src2);
466
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
467
    tcg_gen_mov_tl(dst, cpu_cc_dst);
468
}
469

    
470
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
471
{
472
    tcg_gen_mov_tl(cpu_cc_src, src1);
473
    tcg_gen_mov_tl(cpu_cc_src2, src2);
474
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
475
    tcg_gen_mov_tl(dst, cpu_cc_dst);
476
}
477

    
478
static inline void gen_op_addx_cc2(TCGv dst)
479
{
480
    gen_cc_NZ_icc(cpu_cc_dst);
481
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
482
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
483
#ifdef TARGET_SPARC64
484
    gen_cc_NZ_xcc(cpu_cc_dst);
485
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
486
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
487
#endif
488
    tcg_gen_mov_tl(dst, cpu_cc_dst);
489
}
490

    
491
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
492
{
493
    tcg_gen_mov_tl(cpu_cc_src, src1);
494
    tcg_gen_movi_tl(cpu_cc_src2, src2);
495
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
496
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
497
    gen_cc_clear_icc();
498
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
499
#ifdef TARGET_SPARC64
500
    gen_cc_clear_xcc();
501
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
502
#endif
503
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
504
    gen_op_addx_cc2(dst);
505
}
506

    
507
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
508
{
509
    tcg_gen_mov_tl(cpu_cc_src, src1);
510
    tcg_gen_mov_tl(cpu_cc_src2, src2);
511
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
512
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
513
    gen_cc_clear_icc();
514
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
515
#ifdef TARGET_SPARC64
516
    gen_cc_clear_xcc();
517
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
518
#endif
519
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
520
    gen_op_addx_cc2(dst);
521
}
522

    
523
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
524
{
525
    tcg_gen_mov_tl(cpu_cc_src, src1);
526
    tcg_gen_mov_tl(cpu_cc_src2, src2);
527
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528
    gen_cc_clear_icc();
529
    gen_cc_NZ_icc(cpu_cc_dst);
530
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
531
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
532
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
533
#ifdef TARGET_SPARC64
534
    gen_cc_clear_xcc();
535
    gen_cc_NZ_xcc(cpu_cc_dst);
536
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
537
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
538
#endif
539
    tcg_gen_mov_tl(dst, cpu_cc_dst);
540
}
541

    
542
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
543
{
544
    tcg_gen_mov_tl(cpu_cc_src, src1);
545
    tcg_gen_mov_tl(cpu_cc_src2, src2);
546
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
547
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
548
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
549
    gen_cc_clear_icc();
550
    gen_cc_NZ_icc(cpu_cc_dst);
551
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
552
#ifdef TARGET_SPARC64
553
    gen_cc_clear_xcc();
554
    gen_cc_NZ_xcc(cpu_cc_dst);
555
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
556
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
557
#endif
558
    tcg_gen_mov_tl(dst, cpu_cc_dst);
559
}
560

    
561
/* old op:
562
    if (src1 < T1)
563
        env->psr |= PSR_CARRY;
564
*/
565
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
566
{
567
    TCGv r_temp1, r_temp2;
568
    int l1;
569

    
570
    l1 = gen_new_label();
571
    r_temp1 = tcg_temp_new();
572
    r_temp2 = tcg_temp_new();
573
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
574
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
575
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
576
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
577
    gen_set_label(l1);
578
    tcg_temp_free(r_temp1);
579
    tcg_temp_free(r_temp2);
580
}
581

    
#ifdef TARGET_SPARC64
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif

/* old op:
    if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_sub_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(dst, src1);
        gen_op_logic_cc(dst);
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        gen_op_sub_cc2(dst);
    }
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_sub_cc2(dst);
}

static inline void gen_op_subx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_subx_cc2(dst);
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_subx_cc2(dst);
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

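/* MULScc (multiply step): the low bit of %y selects whether the multiplicand
   is added, the partial product in rs1 is shifted right by one bit with
   (N xor V) shifted into bit 31, %y is shifted right with the old low bit of
   rs1, and the icc flags are updated from the final addition. */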
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif

static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}

static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}

static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}

#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif

/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}

static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif

static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}

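/* Loads and stores through an alternate address space (ASI) are emitted as
   calls to run-time helpers rather than inline TCG memory operations, since
   the selected ASI may refer to MMU registers, device space or other
   attributes that can only be resolved at run time. */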
1725
/* asi moves */
1726
#ifdef TARGET_SPARC64
1727
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1728
{
1729
    int asi;
1730
    TCGv_i32 r_asi;
1731

    
1732
    if (IS_IMM) {
1733
        r_asi = tcg_temp_new_i32();
1734
        tcg_gen_mov_i32(r_asi, cpu_asi);
1735
    } else {
1736
        asi = GET_FIELD(insn, 19, 26);
1737
        r_asi = tcg_const_i32(asi);
1738
    }
1739
    return r_asi;
1740
}
1741

    
1742
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1743
                              int sign)
1744
{
1745
    TCGv_i32 r_asi, r_size, r_sign;
1746

    
1747
    r_asi = gen_get_asi(insn, addr);
1748
    r_size = tcg_const_i32(size);
1749
    r_sign = tcg_const_i32(sign);
1750
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1751
    tcg_temp_free_i32(r_sign);
1752
    tcg_temp_free_i32(r_size);
1753
    tcg_temp_free_i32(r_asi);
1754
}
1755

    
1756
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1757
{
1758
    TCGv_i32 r_asi, r_size;
1759

    
1760
    r_asi = gen_get_asi(insn, addr);
1761
    r_size = tcg_const_i32(size);
1762
    gen_helper_st_asi(addr, src, r_asi, r_size);
1763
    tcg_temp_free_i32(r_size);
1764
    tcg_temp_free_i32(r_asi);
1765
}
1766

    
1767
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1768
{
1769
    TCGv_i32 r_asi, r_size, r_rd;
1770

    
1771
    r_asi = gen_get_asi(insn, addr);
1772
    r_size = tcg_const_i32(size);
1773
    r_rd = tcg_const_i32(rd);
1774
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1775
    tcg_temp_free_i32(r_rd);
1776
    tcg_temp_free_i32(r_size);
1777
    tcg_temp_free_i32(r_asi);
1778
}
1779

    
1780
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1781
{
1782
    TCGv_i32 r_asi, r_size, r_rd;
1783

    
1784
    r_asi = gen_get_asi(insn, addr);
1785
    r_size = tcg_const_i32(size);
1786
    r_rd = tcg_const_i32(rd);
1787
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1788
    tcg_temp_free_i32(r_rd);
1789
    tcg_temp_free_i32(r_size);
1790
    tcg_temp_free_i32(r_asi);
1791
}
1792

    
1793
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}

#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

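/* LDDA: the 64-bit value read via the helper is split across the
   register pair, rd receiving the high word and rd + 1 the low word. */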
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
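/* LDSTUBA: load the addressed byte, then store 0xff back to it. */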
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif

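/* Fetch rs1 into a TCG value: %g0 reads as constant zero, %g1-%g7 are
   TCG globals, and window registers are loaded through cpu_regwptr. */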
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;

/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_src1 = tcg_temp_new(); // const
    cpu_src2 = tcg_temp_new(); // const

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
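                /* Tcc: the trap number is rs1 plus rs2 (or the
                   immediate), masked with V8_TRAP_MASK (or
                   UA2005_HTRAP_MASK for a supervisor on HYPV-capable
                   CPUs) and offset by TT_TRAP. */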
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
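/* Conditional FP moves: copy rs2 to rd only when the selected
   floating-point or integer condition code evaluates true, otherwise
   branch over the register copy. */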
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
3060
                }
3061
#ifdef TARGET_SPARC64
3062
            } else if (xop == 0x25) { /* sll, V9 sllx */
3063
                cpu_src1 = get_src1(insn, cpu_src1);
3064
                if (IS_IMM) {   /* immediate */
3065
                    simm = GET_FIELDs(insn, 20, 31);
3066
                    if (insn & (1 << 12)) {
3067
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3068
                    } else {
3069
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3070
                    }
3071
                } else {                /* register */
3072
                    rs2 = GET_FIELD(insn, 27, 31);
3073
                    gen_movl_reg_TN(rs2, cpu_src2);
3074
                    if (insn & (1 << 12)) {
3075
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3076
                    } else {
3077
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3078
                    }
3079
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3080
                }
3081
                gen_movl_TN_reg(rd, cpu_dst);
3082
            } else if (xop == 0x26) { /* srl, V9 srlx */
3083
                cpu_src1 = get_src1(insn, cpu_src1);
3084
                if (IS_IMM) {   /* immediate */
3085
                    simm = GET_FIELDs(insn, 20, 31);
3086
                    if (insn & (1 << 12)) {
3087
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3088
                    } else {
3089
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3090
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3091
                    }
3092
                } else {                /* register */
3093
                    rs2 = GET_FIELD(insn, 27, 31);
3094
                    gen_movl_reg_TN(rs2, cpu_src2);
3095
                    if (insn & (1 << 12)) {
3096
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3097
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3098
                    } else {
3099
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3100
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3101
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3102
                    }
3103
                }
3104
                gen_movl_TN_reg(rd, cpu_dst);
3105
            } else if (xop == 0x27) { /* sra, V9 srax */
3106
                cpu_src1 = get_src1(insn, cpu_src1);
3107
                if (IS_IMM) {   /* immediate */
3108
                    simm = GET_FIELDs(insn, 20, 31);
3109
                    if (insn & (1 << 12)) {
3110
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3111
                    } else {
3112
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3113
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3114
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3115
                    }
3116
                } else {                /* register */
3117
                    rs2 = GET_FIELD(insn, 27, 31);
3118
                    gen_movl_reg_TN(rs2, cpu_src2);
3119
                    if (insn & (1 << 12)) {
3120
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3121
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3122
                    } else {
3123
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3124
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3125
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3126
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3127
                    }
3128
                }
3129
                gen_movl_TN_reg(rd, cpu_dst);
3130
#endif
3131
            } else if (xop < 0x36) {
3132
                if (xop < 0x20) {
3133
                    cpu_src1 = get_src1(insn, cpu_src1);
3134
                    cpu_src2 = get_src2(insn, cpu_src2);
3135
                    switch (xop & ~0x10) {
3136
                    case 0x0: /* add */
3137
                        if (IS_IMM) {
3138
                            simm = GET_FIELDs(insn, 19, 31);
3139
                            if (xop & 0x10) {
3140
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3141
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3142
                                dc->cc_op = CC_OP_ADD;
3143
                            } else {
3144
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3145
                            }
3146
                        } else {
3147
                            if (xop & 0x10) {
3148
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3149
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3150
                                dc->cc_op = CC_OP_ADD;
3151
                            } else {
3152
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3153
                            }
3154
                        }
3155
                        break;
3156
                    case 0x1: /* and */
3157
                        if (IS_IMM) {
3158
                            simm = GET_FIELDs(insn, 19, 31);
3159
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3160
                        } else {
3161
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3162
                        }
3163
                        if (xop & 0x10) {
3164
                            gen_op_logic_cc(cpu_dst);
3165
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3166
                            dc->cc_op = CC_OP_FLAGS;
3167
                        }
3168
                        break;
3169
                    case 0x2: /* or */
3170
                        if (IS_IMM) {
3171
                            simm = GET_FIELDs(insn, 19, 31);
3172
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3173
                        } else {
3174
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3175
                        }
3176
                        if (xop & 0x10) {
3177
                            gen_op_logic_cc(cpu_dst);
3178
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3179
                            dc->cc_op = CC_OP_FLAGS;
3180
                        }
3181
                        break;
3182
                    case 0x3: /* xor */
3183
                        if (IS_IMM) {
3184
                            simm = GET_FIELDs(insn, 19, 31);
3185
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3186
                        } else {
3187
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3188
                        }
3189
                        if (xop & 0x10) {
3190
                            gen_op_logic_cc(cpu_dst);
3191
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3192
                            dc->cc_op = CC_OP_FLAGS;
3193
                        }
3194
                        break;
3195
                    case 0x4: /* sub */
3196
                        if (IS_IMM) {
3197
                            simm = GET_FIELDs(insn, 19, 31);
3198
                            if (xop & 0x10) {
3199
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3200
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3201
                                dc->cc_op = CC_OP_FLAGS;
3202
                            } else {
3203
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3204
                            }
3205
                        } else {
3206
                            if (xop & 0x10) {
3207
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3208
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3209
                                dc->cc_op = CC_OP_FLAGS;
3210
                            } else {
3211
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3212
                            }
3213
                        }
3214
                        break;
3215
                    case 0x5: /* andn */
3216
                        if (IS_IMM) {
3217
                            simm = GET_FIELDs(insn, 19, 31);
3218
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3219
                        } else {
3220
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3221
                        }
3222
                        if (xop & 0x10) {
3223
                            gen_op_logic_cc(cpu_dst);
3224
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3225
                            dc->cc_op = CC_OP_FLAGS;
3226
                        }
3227
                        break;
3228
                    case 0x6: /* orn */
3229
                        if (IS_IMM) {
3230
                            simm = GET_FIELDs(insn, 19, 31);
3231
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3232
                        } else {
3233
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3234
                        }
3235
                        if (xop & 0x10) {
3236
                            gen_op_logic_cc(cpu_dst);
3237
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3238
                            dc->cc_op = CC_OP_FLAGS;
3239
                        }
3240
                        break;
3241
                    case 0x7: /* xorn */
3242
                        if (IS_IMM) {
3243
                            simm = GET_FIELDs(insn, 19, 31);
3244
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3245
                        } else {
3246
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3247
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3248
                        }
3249
                        if (xop & 0x10) {
3250
                            gen_op_logic_cc(cpu_dst);
3251
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3252
                            dc->cc_op = CC_OP_FLAGS;
3253
                        }
3254
                        break;
3255
                    case 0x8: /* addx, V9 addc */
3256
                        if (IS_IMM) {
3257
                            simm = GET_FIELDs(insn, 19, 31);
3258
                            if (xop & 0x10) {
3259
                                gen_helper_compute_psr();
3260
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3261
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3262
                                dc->cc_op = CC_OP_FLAGS;
3263
                            } else {
3264
                                gen_helper_compute_psr();
3265
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3266
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3267
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3268
                            }
3269
                        } else {
3270
                            if (xop & 0x10) {
3271
                                gen_helper_compute_psr();
3272
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3273
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3274
                                dc->cc_op = CC_OP_FLAGS;
3275
                            } else {
3276
                                gen_helper_compute_psr();
3277
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3278
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3279
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3280
                            }
3281
                        }
3282
                        break;
3283
#ifdef TARGET_SPARC64
3284
                    case 0x9: /* V9 mulx */
3285
                        if (IS_IMM) {
3286
                            simm = GET_FIELDs(insn, 19, 31);
3287
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3288
                        } else {
3289
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3290
                        }
3291
                        break;
3292
#endif
3293
                    case 0xa: /* umul */
3294
                        CHECK_IU_FEATURE(dc, MUL);
3295
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3296
                        if (xop & 0x10) {
3297
                            gen_op_logic_cc(cpu_dst);
3298
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3299
                            dc->cc_op = CC_OP_FLAGS;
3300
                        }
3301
                        break;
3302
                    case 0xb: /* smul */
3303
                        CHECK_IU_FEATURE(dc, MUL);
3304
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3305
                        if (xop & 0x10) {
3306
                            gen_op_logic_cc(cpu_dst);
3307
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3308
                            dc->cc_op = CC_OP_FLAGS;
3309
                        }
3310
                        break;
3311
                    case 0xc: /* subx, V9 subc */
3312
                        if (IS_IMM) {
3313
                            simm = GET_FIELDs(insn, 19, 31);
3314
                            if (xop & 0x10) {
3315
                                gen_helper_compute_psr();
3316
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3317
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3318
                                dc->cc_op = CC_OP_FLAGS;
3319
                            } else {
3320
                                gen_helper_compute_psr();
3321
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3322
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3323
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3324
                            }
3325
                        } else {
3326
                            if (xop & 0x10) {
3327
                                gen_helper_compute_psr();
3328
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3329
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3330
                                dc->cc_op = CC_OP_FLAGS;
3331
                            } else {
3332
                                gen_helper_compute_psr();
3333
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3334
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3335
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3336
                            }
3337
                        }
3338
                        break;
3339
#ifdef TARGET_SPARC64
3340
                    case 0xd: /* V9 udivx */
3341
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3342
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3343
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
3344
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3345
                        break;
3346
#endif
3347
                    case 0xe: /* udiv */
3348
                        CHECK_IU_FEATURE(dc, DIV);
3349
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3350
                        if (xop & 0x10) {
3351
                            gen_op_div_cc(cpu_dst);
3352
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3353
                            dc->cc_op = CC_OP_FLAGS;
3354
                        }
3355
                        break;
3356
                    case 0xf: /* sdiv */
3357
                        CHECK_IU_FEATURE(dc, DIV);
3358
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3359
                        if (xop & 0x10) {
3360
                            gen_op_div_cc(cpu_dst);
3361
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3362
                            dc->cc_op = CC_OP_FLAGS;
3363
                        }
3364
                        break;
3365
                    default:
3366
                        goto illegal_insn;
3367
                    }
3368
                    gen_movl_TN_reg(rd, cpu_dst);
3369
                } else {
3370
                    cpu_src1 = get_src1(insn, cpu_src1);
3371
                    cpu_src2 = get_src2(insn, cpu_src2);
3372
                    switch (xop) {
3373
                    case 0x20: /* taddcc */
3374
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3375
                        gen_movl_TN_reg(rd, cpu_dst);
3376
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3377
                        dc->cc_op = CC_OP_FLAGS;
3378
                        break;
3379
                    case 0x21: /* tsubcc */
3380
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3381
                        gen_movl_TN_reg(rd, cpu_dst);
3382
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3383
                        dc->cc_op = CC_OP_FLAGS;
3384
                        break;
3385
                    case 0x22: /* taddcctv */
3386
                        save_state(dc, cpu_cond);
3387
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3388
                        gen_movl_TN_reg(rd, cpu_dst);
3389
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3390
                        dc->cc_op = CC_OP_FLAGS;
3391
                        break;
3392
                    case 0x23: /* tsubcctv */
3393
                        save_state(dc, cpu_cond);
3394
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3395
                        gen_movl_TN_reg(rd, cpu_dst);
3396
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3397
                        dc->cc_op = CC_OP_FLAGS;
3398
                        break;
3399
                    case 0x24: /* mulscc */
3400
                        gen_helper_compute_psr();
3401
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3402
                        gen_movl_TN_reg(rd, cpu_dst);
3403
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3404
                        dc->cc_op = CC_OP_FLAGS;
3405
                        break;
3406
#ifndef TARGET_SPARC64
3407
                    case 0x25:  /* sll */
3408
                        if (IS_IMM) { /* immediate */
3409
                            simm = GET_FIELDs(insn, 20, 31);
3410
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3411
                        } else { /* register */
3412
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3413
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3414
                        }
3415
                        gen_movl_TN_reg(rd, cpu_dst);
3416
                        break;
3417
                    case 0x26:  /* srl */
3418
                        if (IS_IMM) { /* immediate */
3419
                            simm = GET_FIELDs(insn, 20, 31);
3420
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3421
                        } else { /* register */
3422
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3423
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3424
                        }
3425
                        gen_movl_TN_reg(rd, cpu_dst);
3426
                        break;
3427
                    case 0x27:  /* sra */
3428
                        if (IS_IMM) { /* immediate */
3429
                            simm = GET_FIELDs(insn, 20, 31);
3430
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3431
                        } else { /* register */
3432
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3433
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3434
                        }
3435
                        gen_movl_TN_reg(rd, cpu_dst);
3436
                        break;
3437
#endif
3438
                    case 0x30:
3439
                        {
3440
                            switch(rd) {
3441
                            case 0: /* wry */
3442
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3443
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3444
                                break;
3445
#ifndef TARGET_SPARC64
3446
                            case 0x01 ... 0x0f: /* undefined in the
3447
                                                   SPARCv8 manual, nop
3448
                                                   on the microSPARC
3449
                                                   II */
3450
                            case 0x10 ... 0x1f: /* implementation-dependent
3451
                                                   in the SPARCv8
3452
                                                   manual, nop on the
3453
                                                   microSPARC II */
3454
                                break;
3455
#else
3456
                            case 0x2: /* V9 wrccr */
3457
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3458
                                gen_helper_wrccr(cpu_dst);
3459
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3460
                                dc->cc_op = CC_OP_FLAGS;
3461
                                break;
3462
                            case 0x3: /* V9 wrasi */
3463
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3464
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3465
                                break;
3466
                            case 0x6: /* V9 wrfprs */
3467
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3468
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3469
                                save_state(dc, cpu_cond);
3470
                                gen_op_next_insn();
3471
                                tcg_gen_exit_tb(0);
3472
                                dc->is_br = 1;
3473
                                break;
3474
                            case 0xf: /* V9 sir, nop if user */
3475
#if !defined(CONFIG_USER_ONLY)
3476
                                if (supervisor(dc))
3477
                                    ; // XXX
3478
#endif
3479
                                break;
3480
                            case 0x13: /* Graphics Status */
3481
                                if (gen_trap_ifnofpu(dc, cpu_cond))
3482
                                    goto jmp_insn;
3483
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3484
                                break;
3485
                            case 0x14: /* Softint set */
3486
                                if (!supervisor(dc))
3487
                                    goto illegal_insn;
3488
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3489
                                gen_helper_set_softint(cpu_tmp64);
3490
                                break;
3491
                            case 0x15: /* Softint clear */
3492
                                if (!supervisor(dc))
3493
                                    goto illegal_insn;
3494
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3495
                                gen_helper_clear_softint(cpu_tmp64);
3496
                                break;
3497
                            case 0x16: /* Softint write */
3498
                                if (!supervisor(dc))
3499
                                    goto illegal_insn;
3500
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3501
                                gen_helper_write_softint(cpu_tmp64);
3502
                                break;
3503
                            case 0x17: /* Tick compare */
3504
#if !defined(CONFIG_USER_ONLY)
3505
                                if (!supervisor(dc))
3506
                                    goto illegal_insn;
3507
#endif
3508
                                {
3509
                                    TCGv_ptr r_tickptr;
3510

    
3511
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3512
                                                   cpu_src2);
3513
                                    r_tickptr = tcg_temp_new_ptr();
3514
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3515
                                                   offsetof(CPUState, tick));
3516
                                    gen_helper_tick_set_limit(r_tickptr,
3517
                                                              cpu_tick_cmpr);
3518
                                    tcg_temp_free_ptr(r_tickptr);
3519
                                }
3520
                                break;
3521
                            case 0x18: /* System tick */
3522
#if !defined(CONFIG_USER_ONLY)
3523
                                if (!supervisor(dc))
3524
                                    goto illegal_insn;
3525
#endif
3526
                                {
3527
                                    TCGv_ptr r_tickptr;
3528

    
3529
                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
3530
                                                   cpu_src2);
3531
                                    r_tickptr = tcg_temp_new_ptr();
3532
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3533
                                                   offsetof(CPUState, stick));
3534
                                    gen_helper_tick_set_count(r_tickptr,
3535
                                                              cpu_dst);
3536
                                    tcg_temp_free_ptr(r_tickptr);
3537
                                }
3538
                                break;
3539
                            case 0x19: /* System tick compare */
3540
#if !defined(CONFIG_USER_ONLY)
3541
                                if (!supervisor(dc))
3542
                                    goto illegal_insn;
3543
#endif
3544
                                {
3545
                                    TCGv_ptr r_tickptr;
3546

    
3547
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3548
                                                   cpu_src2);
3549
                                    r_tickptr = tcg_temp_new_ptr();
3550
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3551
                                                   offsetof(CPUState, stick));
3552
                                    gen_helper_tick_set_limit(r_tickptr,
3553
                                                              cpu_stick_cmpr);
3554
                                    tcg_temp_free_ptr(r_tickptr);
3555
                                }
3556
                                break;
3557

    
3558
                            case 0x10: /* Performance Control */
3559
                            case 0x11: /* Performance Instrumentation
3560
                                          Counter */
3561
                            case 0x12: /* Dispatch Control */
3562
#endif
3563
                            default:
3564
                                goto illegal_insn;
3565
                            }
3566
                        }
3567
                        break;
3568
#if !defined(CONFIG_USER_ONLY)
3569
                    case 0x31: /* wrpsr, V9 saved, restored */
3570
                        {
3571
                            if (!supervisor(dc))
3572
                                goto priv_insn;
3573
#ifdef TARGET_SPARC64
3574
                            switch (rd) {
3575
                            case 0:
3576
                                gen_helper_saved();
3577
                                break;
3578
                            case 1:
3579
                                gen_helper_restored();
3580
                                break;
3581
                            case 2: /* UA2005 allclean */
3582
                            case 3: /* UA2005 otherw */
3583
                            case 4: /* UA2005 normalw */
3584
                            case 5: /* UA2005 invalw */
3585
                                // XXX
3586
                            default:
3587
                                goto illegal_insn;
3588
                            }
3589
#else
3590
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3591
                            gen_helper_wrpsr(cpu_dst);
3592
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3593
                            dc->cc_op = CC_OP_FLAGS;
3594
                            save_state(dc, cpu_cond);
3595
                            gen_op_next_insn();
3596
                            tcg_gen_exit_tb(0);
3597
                            dc->is_br = 1;
3598
#endif
3599
                        }
3600
                        break;
3601
                    case 0x32: /* wrwim, V9 wrpr */
3602
                        {
3603
                            if (!supervisor(dc))
3604
                                goto priv_insn;
3605
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3606
#ifdef TARGET_SPARC64
3607
                            switch (rd) {
3608
                            case 0: // tpc
3609
                                {
3610
                                    TCGv_ptr r_tsptr;
3611

    
3612
                                    r_tsptr = tcg_temp_new_ptr();
3613
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
3614
                                                   offsetof(CPUState, tsptr));
3615
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3616
                                                  offsetof(trap_state, tpc));
3617
                                    tcg_temp_free_ptr(r_tsptr);
3618
                                }
3619
                                break;
3620
                            case 1: // tnpc
3621
                                {
3622
                                    TCGv_ptr r_tsptr;
3623

    
3624
                                    r_tsptr = tcg_temp_new_ptr();
3625
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
3626
                                                   offsetof(CPUState, tsptr));
3627
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3628
                                                  offsetof(trap_state, tnpc));
3629
                                    tcg_temp_free_ptr(r_tsptr);
3630
                                }
3631
                                break;
3632
                            case 2: // tstate
3633
                                {
3634
                                    TCGv_ptr r_tsptr;
3635

    
3636
                                    r_tsptr = tcg_temp_new_ptr();
3637
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
3638
                                                   offsetof(CPUState, tsptr));
3639
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3640
                                                  offsetof(trap_state,
3641
                                                           tstate));
3642
                                    tcg_temp_free_ptr(r_tsptr);
3643
                                }
3644
                                break;
3645
                            case 3: // tt
3646
                                {
3647
                                    TCGv_ptr r_tsptr;
3648

    
3649
                                    r_tsptr = tcg_temp_new_ptr();
3650
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
3651
                                                   offsetof(CPUState, tsptr));
3652
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3653
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3654
                                                   offsetof(trap_state, tt));
3655
                                    tcg_temp_free_ptr(r_tsptr);
3656
                                }
3657
                                break;
3658
                            case 4: // tick
3659
                                {
3660
                                    TCGv_ptr r_tickptr;
3661

    
3662
                                    r_tickptr = tcg_temp_new_ptr();
3663
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3664
                                                   offsetof(CPUState, tick));
3665
                                    gen_helper_tick_set_count(r_tickptr,
3666
                                                              cpu_tmp0);
3667
                                    tcg_temp_free_ptr(r_tickptr);
3668
                                }
3669
                                break;
3670
                            case 5: // tba
3671
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3672
                                break;
3673
                            case 6: // pstate
3674
                                save_state(dc, cpu_cond);
3675
                                gen_helper_wrpstate(cpu_tmp0);
3676
                                gen_op_next_insn();
3677
                                tcg_gen_exit_tb(0);
3678
                                dc->is_br = 1;
3679
                                break;
3680
                            case 7: // tl
3681
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3682
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3683
                                               offsetof(CPUSPARCState, tl));
3684
                                break;
3685
                            case 8: // pil
3686
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3687
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3688
                                               offsetof(CPUSPARCState,
3689
                                                        psrpil));
3690
                                break;
3691
                            case 9: // cwp
3692
                                gen_helper_wrcwp(cpu_tmp0);
3693
                                break;
3694
                            case 10: // cansave
3695
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3696
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3697
                                               offsetof(CPUSPARCState,
3698
                                                        cansave));
3699
                                break;
3700
                            case 11: // canrestore
3701
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3702
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3703
                                               offsetof(CPUSPARCState,
3704
                                                        canrestore));
3705
                                break;
3706
                            case 12: // cleanwin
3707
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3708
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3709
                                               offsetof(CPUSPARCState,
3710
                                                        cleanwin));
3711
                                break;
3712
                            case 13: // otherwin
3713
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3714
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3715
                                               offsetof(CPUSPARCState,
3716
                                                        otherwin));
3717
                                break;
3718
                            case 14: // wstate
3719
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3720
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3721
                                               offsetof(CPUSPARCState,
3722
                                                        wstate));
3723
                                break;
3724
                            case 16: // UA2005 gl
3725
                                CHECK_IU_FEATURE(dc, GL);
3726
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3727
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3728
                                               offsetof(CPUSPARCState, gl));
3729
                                break;
3730
                            case 26: // UA2005 strand status
3731
                                CHECK_IU_FEATURE(dc, HYPV);
3732
                                if (!hypervisor(dc))
3733
                                    goto priv_insn;
3734
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3735
                                break;
3736
                            default:
3737
                                goto illegal_insn;
3738
                            }
3739
#else
3740
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3741
                            if (dc->def->nwindows != 32)
3742
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3743
                                                (1 << dc->def->nwindows) - 1);
3744
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3745
#endif
3746
                        }
3747
                        break;
3748
                    case 0x33: /* wrtbr, UA2005 wrhpr */
3749
                        {
3750
#ifndef TARGET_SPARC64
3751
                            if (!supervisor(dc))
3752
                                goto priv_insn;
3753
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3754
#else
3755
                            CHECK_IU_FEATURE(dc, HYPV);
3756
                            if (!hypervisor(dc))
3757
                                goto priv_insn;
3758
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3759
                            switch (rd) {
3760
                            case 0: // hpstate
3761
                                // XXX gen_op_wrhpstate();
3762
                                save_state(dc, cpu_cond);
3763
                                gen_op_next_insn();
3764
                                tcg_gen_exit_tb(0);
3765
                                dc->is_br = 1;
3766
                                break;
3767
                            case 1: // htstate
3768
                                // XXX gen_op_wrhtstate();
3769
                                break;
3770
                            case 3: // hintp
3771
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3772
                                break;
3773
                            case 5: // htba
3774
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3775
                                break;
3776
                            case 31: // hstick_cmpr
3777
                                {
3778
                                    TCGv_ptr r_tickptr;
3779

    
3780
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3781
                                    r_tickptr = tcg_temp_new_ptr();
3782
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3783
                                                   offsetof(CPUState, hstick));
3784
                                    gen_helper_tick_set_limit(r_tickptr,
3785
                                                              cpu_hstick_cmpr);
3786
                                    tcg_temp_free_ptr(r_tickptr);
3787
                                }
3788
                                break;
3789
                            case 6: // hver readonly
3790
                            default:
3791
                                goto illegal_insn;
3792
                            }
3793
#endif
3794
                        }
3795
                        break;
3796
#endif
3797
#ifdef TARGET_SPARC64
3798
                    case 0x2c: /* V9 movcc */
3799
                        {
3800
                            int cc = GET_FIELD_SP(insn, 11, 12);
3801
                            int cond = GET_FIELD_SP(insn, 14, 17);
3802
                            TCGv r_cond;
3803
                            int l1;
3804

    
3805
                            r_cond = tcg_temp_new();
3806
                            if (insn & (1 << 18)) {
3807
                                if (cc == 0)
3808
                                    gen_cond(r_cond, 0, cond, dc);
3809
                                else if (cc == 2)
3810
                                    gen_cond(r_cond, 1, cond, dc);
3811
                                else
3812
                                    goto illegal_insn;
3813
                            } else {
3814
                                gen_fcond(r_cond, cc, cond);
3815
                            }
3816

    
3817
                            l1 = gen_new_label();
3818

    
3819
                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3820
                            if (IS_IMM) {       /* immediate */
3821
                                TCGv r_const;
3822

    
3823
                                simm = GET_FIELD_SPs(insn, 0, 10);
3824
                                r_const = tcg_const_tl(simm);
3825
                                gen_movl_TN_reg(rd, r_const);
3826
                                tcg_temp_free(r_const);
3827
                            } else {
3828
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3829
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3830
                                gen_movl_TN_reg(rd, cpu_tmp0);
3831
                            }
3832
                            gen_set_label(l1);
3833
                            tcg_temp_free(r_cond);
3834
                            break;
3835
                        }
3836
                    case 0x2d: /* V9 sdivx */
3837
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3838
                        gen_movl_TN_reg(rd, cpu_dst);
3839
                        break;
3840
                    case 0x2e: /* V9 popc */
3841
                        {
3842
                            cpu_src2 = get_src2(insn, cpu_src2);
3843
                            gen_helper_popc(cpu_dst, cpu_src2);
3844
                            gen_movl_TN_reg(rd, cpu_dst);
3845
                        }
3846
                    case 0x2f: /* V9 movr */
3847
                        {
3848
                            int cond = GET_FIELD_SP(insn, 10, 12);
3849
                            int l1;
3850

    
3851
                            cpu_src1 = get_src1(insn, cpu_src1);
3852

    
3853
                            l1 = gen_new_label();
3854

    
3855
                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3856
                                              cpu_src1, 0, l1);
3857
                            if (IS_IMM) {       /* immediate */
3858
                                TCGv r_const;
3859

    
3860
                                simm = GET_FIELD_SPs(insn, 0, 9);
3861
                                r_const = tcg_const_tl(simm);
3862
                                gen_movl_TN_reg(rd, r_const);
3863
                                tcg_temp_free(r_const);
3864
                            } else {
3865
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3866
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3867
                                gen_movl_TN_reg(rd, cpu_tmp0);
3868
                            }
3869
                            gen_set_label(l1);
3870
                            break;
3871
                        }
3872
#endif
3873
                    default:
3874
                        goto illegal_insn;
3875
                    }
3876
                }
3877
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3878
#ifdef TARGET_SPARC64
3879
                int opf = GET_FIELD_SP(insn, 5, 13);
3880
                rs1 = GET_FIELD(insn, 13, 17);
3881
                rs2 = GET_FIELD(insn, 27, 31);
3882
                if (gen_trap_ifnofpu(dc, cpu_cond))
3883
                    goto jmp_insn;
3884

    
3885
                switch (opf) {
3886
                case 0x000: /* VIS I edge8cc */
3887
                case 0x001: /* VIS II edge8n */
3888
                case 0x002: /* VIS I edge8lcc */
3889
                case 0x003: /* VIS II edge8ln */
3890
                case 0x004: /* VIS I edge16cc */
3891
                case 0x005: /* VIS II edge16n */
3892
                case 0x006: /* VIS I edge16lcc */
3893
                case 0x007: /* VIS II edge16ln */
3894
                case 0x008: /* VIS I edge32cc */
3895
                case 0x009: /* VIS II edge32n */
3896
                case 0x00a: /* VIS I edge32lcc */
3897
                case 0x00b: /* VIS II edge32ln */
3898
                    // XXX
3899
                    goto illegal_insn;
3900
                case 0x010: /* VIS I array8 */
3901
                    CHECK_FPU_FEATURE(dc, VIS1);
3902
                    cpu_src1 = get_src1(insn, cpu_src1);
3903
                    gen_movl_reg_TN(rs2, cpu_src2);
3904
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3905
                    gen_movl_TN_reg(rd, cpu_dst);
3906
                    break;
3907
                case 0x012: /* VIS I array16 */
3908
                    CHECK_FPU_FEATURE(dc, VIS1);
3909
                    cpu_src1 = get_src1(insn, cpu_src1);
3910
                    gen_movl_reg_TN(rs2, cpu_src2);
3911
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3912
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3913
                    gen_movl_TN_reg(rd, cpu_dst);
3914
                    break;
3915
                case 0x014: /* VIS I array32 */
3916
                    CHECK_FPU_FEATURE(dc, VIS1);
3917
                    cpu_src1 = get_src1(insn, cpu_src1);
3918
                    gen_movl_reg_TN(rs2, cpu_src2);
3919
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3920
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3921
                    gen_movl_TN_reg(rd, cpu_dst);
3922
                    break;
3923
                case 0x018: /* VIS I alignaddr */
3924
                    CHECK_FPU_FEATURE(dc, VIS1);
3925
                    cpu_src1 = get_src1(insn, cpu_src1);
3926
                    gen_movl_reg_TN(rs2, cpu_src2);
3927
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3928
                    gen_movl_TN_reg(rd, cpu_dst);
3929
                    break;
3930
                case 0x019: /* VIS II bmask */
3931
                case 0x01a: /* VIS I alignaddrl */
3932
                    // XXX
3933
                    goto illegal_insn;
3934
                case 0x020: /* VIS I fcmple16 */
3935
                    CHECK_FPU_FEATURE(dc, VIS1);
3936
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3937
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3938
                    gen_helper_fcmple16();
3939
                    gen_op_store_DT0_fpr(DFPREG(rd));
3940
                    break;
3941
                case 0x022: /* VIS I fcmpne16 */
3942
                    CHECK_FPU_FEATURE(dc, VIS1);
3943
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3944
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3945
                    gen_helper_fcmpne16();
3946
                    gen_op_store_DT0_fpr(DFPREG(rd));
3947
                    break;
3948
                case 0x024: /* VIS I fcmple32 */
3949
                    CHECK_FPU_FEATURE(dc, VIS1);
3950
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3951
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3952
                    gen_helper_fcmple32();
3953
                    gen_op_store_DT0_fpr(DFPREG(rd));
3954
                    break;
3955
                case 0x026: /* VIS I fcmpne32 */
3956
                    CHECK_FPU_FEATURE(dc, VIS1);
3957
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3958
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3959
                    gen_helper_fcmpne32();
3960
                    gen_op_store_DT0_fpr(DFPREG(rd));
3961
                    break;
3962
                case 0x028: /* VIS I fcmpgt16 */
3963
                    CHECK_FPU_FEATURE(dc, VIS1);
3964
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3965
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
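                /* The VIS logical ops below operate directly on the TCG
                   globals backing the FP registers: a 64-bit VIS operand
                   is the pair of 32-bit halves cpu_fpr[DFPREG(r)] and
                   cpu_fpr[DFPREG(r) + 1], so each 64-bit op is emitted as
                   two independent 32-bit TCG ops, while the "s" variants
                   touch a single 32-bit register. */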
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
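            /* V9 return: compute rs1 + (simm13 or rs2) as the target,
               restore the caller's register window, check 4-byte
               alignment of the target and make it the new npc.  The
               next pc is therefore only known at run time (DYNAMIC_PC). */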
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
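            /* The remaining format 3 ops (jmpl, rett, flush, save,
               restore, done/retry) are decoded here after the common
               effective-address computation: cpu_dst = rs1 + simm13 in
               the immediate form, otherwise rs1 + rs2 (or just rs1 when
               rs2 is %g0). */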
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
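    /* Format 3 load/store instructions: the op3 field (extracted into
       xop below) selects the operation; the effective address is
       computed first as rs1 + simm13 (immediate form) or rs1 + rs2,
       except for casa/casxa, which take the address from rs1 alone and
       keep rs2 as the comparison value. */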
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
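                /* ldd loads a 64-bit value and splits it across an
                   even/odd register pair: the low 32 bits go to rd + 1
                   and the high 32 bits are left in cpu_val, which is
                   written back to rd after the switch. */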
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomic */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomic */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
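                /* Alternate-space loads: on 32-bit SPARC these are
                   privileged and only exist in the register-offset form,
                   hence the IS_IMM and supervisor() checks below; on
                   SPARC64 the immediate form takes the ASI from the %asi
                   register instead, so the checks are compiled out. */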
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomic */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomic */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
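            /* xop 0x20-0x23: floating-point loads.  These first check
               that the FPU is enabled (gen_trap_ifnofpu) and save the
               translation state, since the helpers below may raise
               exceptions. */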
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
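            /* Integer stores: the value to store is fetched from rd into
               cpu_val before dispatching; std additionally pairs rd + 1
               into a 64-bit temporary, and the alternate-space forms go
               through the ASI store helpers. */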
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
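            /* xop 0x24-0x27: floating-point stores (stf, stfsr, stqf or
               the V8 stdfq queue case, stdf), again guarded by the
               FPU-enabled check and a save_state for faulting helpers. */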
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
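            /* xop 0x34-0x3e: stores to alternate space, including the V9
               compare-and-swap forms (casa/casxa), which also write the
               swapped-out value back to rd. */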
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
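    /* Fall through for instructions that did not branch: advance the
       delay-slot pair.  A dynamic npc forces pc to become dynamic too,
       a pending conditional branch (JUMP_PC) is resolved here, and the
       common case simply steps pc/npc by one instruction. */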
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}

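/* Translate a block of guest code starting at tb->pc.  When spc is
   non-zero the loop also records, for every generated op, the guest
   pc/npc and instruction count so that gen_pc_load() can later map a
   host pc back to a guest state (the "search pc" mode). */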
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

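/* One-time TCG setup: register the fixed env pointer and every piece
   of CPU state the translator accesses as TCG globals, so that later
   translation can refer to cpu_pc, cpu_fpr[], etc. directly. */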
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

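/* Restore pc/npc from the tables filled in by the search-pc pass.  The
   npc slot reuses the DYNAMIC_PC/JUMP_PC encodings (1 and 2): 1 means
   npc is already in the CPU state, 2 means it must be picked from the
   recorded jump targets using the condition value passed in puc. */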
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}