target-sparc/translate.c @ bc57c114


1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "exec-all.h"
29
#include "disas.h"
30
#include "helper.h"
31
#include "tcg-op.h"
32

    
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
#define DEBUG_DISAS
37

    
38
#define DYNAMIC_PC  1 /* dynamic pc value */
39
#define JUMP_PC     2 /* dynamic pc value which takes only two values
40
                         according to jump_pc[T2] */
41

    
42
/* global register indexes */
43
static TCGv_ptr cpu_env, cpu_regwptr;
44
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45
static TCGv_i32 cpu_cc_op;
46
static TCGv_i32 cpu_psr;
47
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48
static TCGv cpu_y;
49
#ifndef CONFIG_USER_ONLY
50
static TCGv cpu_tbr;
51
#endif
52
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53
#ifdef TARGET_SPARC64
54
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55
static TCGv cpu_gsr;
56
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58
static TCGv_i32 cpu_softint;
59
#else
60
static TCGv cpu_wim;
61
#endif
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64
static TCGv_i32 cpu_tmp32;
65
static TCGv_i64 cpu_tmp64;
66
/* Floating point registers */
67
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68

    
69
#include "gen-icount.h"
70

    
71
typedef struct DisasContext {
72
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
73
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75
    int is_br;
76
    int mem_idx;
77
    int fpu_enabled;
78
    int address_mask_32bit;
79
    uint32_t cc_op;  /* current CC operation */
80
    struct TranslationBlock *tb;
81
    sparc_def_t *def;
82
} DisasContext;
83

    
84
// This function uses non-native bit order
85
#define GET_FIELD(X, FROM, TO)                                  \
86
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87

    
88
// This function uses the order in the manuals, i.e. bit 0 is 2^0
89
#define GET_FIELD_SP(X, FROM, TO)               \
90
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
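/* Example: the 4-bit branch condition field occupies instruction bits
   28..25 in manual numbering, so it can be extracted either way:
       cond = GET_FIELD(insn, 3, 6);        MSB-relative numbering
       cond = GET_FIELD_SP(insn, 25, 28);   manual numbering (bit 0 is 2^0)
   Both forms yield the same value. */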
91

    
92
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
94

    
95
#ifdef TARGET_SPARC64
96
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98
#else
99
#define DFPREG(r) (r & 0x1e)
100
#define QFPREG(r) (r & 0x1c)
101
#endif
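/* On SPARC64 the low bit of the 5-bit register field supplies bit 5 of the
   actual double/quad register number (e.g. DFPREG(1) == 32, the double
   register starting at %f32); on 32-bit targets the macros only force
   even/quad alignment. */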
102

    
103
#define UA2005_HTRAP_MASK 0xff
104
#define V8_TRAP_MASK 0x7f
105

    
106
static int sign_extend(int x, int len)
107
{
108
    len = 32 - len;
109
    return (x << len) >> len;
110
}
111

    
112
#define IS_IMM (insn & (1<<13))
113

    
114
/* floating point registers moves */
115
static void gen_op_load_fpr_DT0(unsigned int src)
116
{
117
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118
                   offsetof(CPU_DoubleU, l.upper));
119
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120
                   offsetof(CPU_DoubleU, l.lower));
121
}
122

    
123
static void gen_op_load_fpr_DT1(unsigned int src)
124
{
125
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126
                   offsetof(CPU_DoubleU, l.upper));
127
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128
                   offsetof(CPU_DoubleU, l.lower));
129
}
130

    
131
static void gen_op_store_DT0_fpr(unsigned int dst)
132
{
133
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134
                   offsetof(CPU_DoubleU, l.upper));
135
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136
                   offsetof(CPU_DoubleU, l.lower));
137
}
138

    
139
static void gen_op_load_fpr_QT0(unsigned int src)
140
{
141
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142
                   offsetof(CPU_QuadU, l.upmost));
143
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144
                   offsetof(CPU_QuadU, l.upper));
145
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146
                   offsetof(CPU_QuadU, l.lower));
147
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148
                   offsetof(CPU_QuadU, l.lowest));
149
}
150

    
151
static void gen_op_load_fpr_QT1(unsigned int src)
152
{
153
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154
                   offsetof(CPU_QuadU, l.upmost));
155
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156
                   offsetof(CPU_QuadU, l.upper));
157
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158
                   offsetof(CPU_QuadU, l.lower));
159
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160
                   offsetof(CPU_QuadU, l.lowest));
161
}
162

    
163
static void gen_op_store_QT0_fpr(unsigned int dst)
164
{
165
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166
                   offsetof(CPU_QuadU, l.upmost));
167
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168
                   offsetof(CPU_QuadU, l.upper));
169
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170
                   offsetof(CPU_QuadU, l.lower));
171
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172
                   offsetof(CPU_QuadU, l.lowest));
173
}
174

    
175
/* moves */
176
#ifdef CONFIG_USER_ONLY
177
#define supervisor(dc) 0
178
#ifdef TARGET_SPARC64
179
#define hypervisor(dc) 0
180
#endif
181
#else
182
#define supervisor(dc) (dc->mem_idx >= 1)
183
#ifdef TARGET_SPARC64
184
#define hypervisor(dc) (dc->mem_idx == 2)
185
#else
186
#endif
187
#endif
188

    
189
#ifdef TARGET_SPARC64
190
#ifndef TARGET_ABI32
191
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
192
#else
193
#define AM_CHECK(dc) (1)
194
#endif
195
#endif
196

    
197
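/* Truncate an effective address to 32 bits when V9 address masking is in
   effect (AM_CHECK); compiled to a no-op for 32-bit targets. */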
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198
{
199
#ifdef TARGET_SPARC64
200
    if (AM_CHECK(dc))
201
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202
#endif
203
}
204

    
205
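/* Register file access: %g0 reads as constant zero, %g1-%g7 live in the
   TCG globals cpu_gregs[], and window registers (reg >= 8) are accessed
   through cpu_regwptr.  gen_movl_TN_reg below is the store counterpart. */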
static inline void gen_movl_reg_TN(int reg, TCGv tn)
206
{
207
    if (reg == 0)
208
        tcg_gen_movi_tl(tn, 0);
209
    else if (reg < 8)
210
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211
    else {
212
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
213
    }
214
}
215

    
216
static inline void gen_movl_TN_reg(int reg, TCGv tn)
217
{
218
    if (reg == 0)
219
        return;
220
    else if (reg < 8)
221
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
222
    else {
223
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
224
    }
225
}
226

    
227
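/* End the TB by jumping to pc/npc.  If both targets stay on the same page
   as the current TB, emit a goto_tb so the blocks can be chained directly;
   otherwise exit to the main loop with a plain exit_tb(0). */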
static inline void gen_goto_tb(DisasContext *s, int tb_num,
228
                               target_ulong pc, target_ulong npc)
229
{
230
    TranslationBlock *tb;
231

    
232
    tb = s->tb;
233
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
235
        /* jump to same page: we can use a direct jump */
236
        tcg_gen_goto_tb(tb_num);
237
        tcg_gen_movi_tl(cpu_pc, pc);
238
        tcg_gen_movi_tl(cpu_npc, npc);
239
        tcg_gen_exit_tb((long)tb + tb_num);
240
    } else {
241
        /* jump to another page: currently not optimized */
242
        tcg_gen_movi_tl(cpu_pc, pc);
243
        tcg_gen_movi_tl(cpu_npc, npc);
244
        tcg_gen_exit_tb(0);
245
    }
246
}
247

    
248
// XXX suboptimal
249
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250
{
251
    tcg_gen_extu_i32_tl(reg, src);
252
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253
    tcg_gen_andi_tl(reg, reg, 0x1);
254
}
255

    
256
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257
{
258
    tcg_gen_extu_i32_tl(reg, src);
259
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260
    tcg_gen_andi_tl(reg, reg, 0x1);
261
}
262

    
263
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264
{
265
    tcg_gen_extu_i32_tl(reg, src);
266
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267
    tcg_gen_andi_tl(reg, reg, 0x1);
268
}
269

    
270
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271
{
272
    tcg_gen_extu_i32_tl(reg, src);
273
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274
    tcg_gen_andi_tl(reg, reg, 0x1);
275
}
276

    
277
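/* Trap-on-overflow check for the tagged add instructions: signed overflow
   occurred iff the operands have the same sign and the result's sign
   differs, in which case TT_TOVF is raised. */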
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
278
{
279
    TCGv r_temp;
280
    TCGv_i32 r_const;
281
    int l1;
282

    
283
    l1 = gen_new_label();
284

    
285
    r_temp = tcg_temp_new();
286
    tcg_gen_xor_tl(r_temp, src1, src2);
287
    tcg_gen_not_tl(r_temp, r_temp);
288
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
289
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
290
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
291
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
292
    r_const = tcg_const_i32(TT_TOVF);
293
    gen_helper_raise_exception(r_const);
294
    tcg_temp_free_i32(r_const);
295
    gen_set_label(l1);
296
    tcg_temp_free(r_temp);
297
}
298

    
299
static inline void gen_tag_tv(TCGv src1, TCGv src2)
300
{
301
    int l1;
302
    TCGv_i32 r_const;
303

    
304
    l1 = gen_new_label();
305
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
306
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
307
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
308
    r_const = tcg_const_i32(TT_TOVF);
309
    gen_helper_raise_exception(r_const);
310
    tcg_temp_free_i32(r_const);
311
    gen_set_label(l1);
312
}
313

    
314
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
315
{
316
    tcg_gen_mov_tl(cpu_cc_src, src1);
317
    tcg_gen_movi_tl(cpu_cc_src2, src2);
318
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
319
    tcg_gen_mov_tl(dst, cpu_cc_dst);
320
}
321

    
322
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
323
{
324
    tcg_gen_mov_tl(cpu_cc_src, src1);
325
    tcg_gen_mov_tl(cpu_cc_src2, src2);
326
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
327
    tcg_gen_mov_tl(dst, cpu_cc_dst);
328
}
329

    
330
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
331
{
332
    tcg_gen_mov_tl(cpu_cc_src, src1);
333
    tcg_gen_movi_tl(cpu_cc_src2, src2);
334
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
335
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
336
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
337
    tcg_gen_mov_tl(dst, cpu_cc_dst);
338
}
339

    
340
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
341
{
342
    tcg_gen_mov_tl(cpu_cc_src, src1);
343
    tcg_gen_mov_tl(cpu_cc_src2, src2);
344
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
345
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
346
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
347
    tcg_gen_mov_tl(dst, cpu_cc_dst);
348
}
349

    
350
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
351
{
352
    tcg_gen_mov_tl(cpu_cc_src, src1);
353
    tcg_gen_mov_tl(cpu_cc_src2, src2);
354
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
355
    tcg_gen_mov_tl(dst, cpu_cc_dst);
356
}
357

    
358
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
359
{
360
    tcg_gen_mov_tl(cpu_cc_src, src1);
361
    tcg_gen_mov_tl(cpu_cc_src2, src2);
362
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
363
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
364
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
365
    tcg_gen_mov_tl(dst, cpu_cc_dst);
366
}
367

    
368
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
369
{
370
    TCGv r_temp;
371
    TCGv_i32 r_const;
372
    int l1;
373

    
374
    l1 = gen_new_label();
375

    
376
    r_temp = tcg_temp_new();
377
    tcg_gen_xor_tl(r_temp, src1, src2);
378
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
379
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
380
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
381
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
382
    r_const = tcg_const_i32(TT_TOVF);
383
    gen_helper_raise_exception(r_const);
384
    tcg_temp_free_i32(r_const);
385
    gen_set_label(l1);
386
    tcg_temp_free(r_temp);
387
}
388

    
389
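/* SUBcc with an immediate: record the pending flag computation in
   cpu_cc_op/dc->cc_op so the PSR is only evaluated lazily; subtracting
   zero degenerates to CC_OP_LOGIC since the result equals the operand. */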
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
390
{
391
    tcg_gen_mov_tl(cpu_cc_src, src1);
392
    tcg_gen_movi_tl(cpu_cc_src2, src2);
393
    if (src2 == 0) {
394
        tcg_gen_mov_tl(cpu_cc_dst, src1);
395
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
396
        dc->cc_op = CC_OP_LOGIC;
397
    } else {
398
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
399
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
400
        dc->cc_op = CC_OP_SUB;
401
    }
402
    tcg_gen_mov_tl(dst, cpu_cc_dst);
403
}
404

    
405
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
406
{
407
    tcg_gen_mov_tl(cpu_cc_src, src1);
408
    tcg_gen_mov_tl(cpu_cc_src2, src2);
409
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
410
    tcg_gen_mov_tl(dst, cpu_cc_dst);
411
}
412

    
413
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
414
{
415
    tcg_gen_mov_tl(cpu_cc_src, src1);
416
    tcg_gen_movi_tl(cpu_cc_src2, src2);
417
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
418
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
419
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
420
    tcg_gen_mov_tl(dst, cpu_cc_dst);
421
}
422

    
423
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
424
{
425
    tcg_gen_mov_tl(cpu_cc_src, src1);
426
    tcg_gen_mov_tl(cpu_cc_src2, src2);
427
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
428
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
429
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
430
    tcg_gen_mov_tl(dst, cpu_cc_dst);
431
}
432

    
433
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
434
{
435
    tcg_gen_mov_tl(cpu_cc_src, src1);
436
    tcg_gen_mov_tl(cpu_cc_src2, src2);
437
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
438
    tcg_gen_mov_tl(dst, cpu_cc_dst);
439
}
440

    
441
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
442
{
443
    tcg_gen_mov_tl(cpu_cc_src, src1);
444
    tcg_gen_mov_tl(cpu_cc_src2, src2);
445
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
446
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
447
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448
    tcg_gen_mov_tl(dst, cpu_cc_dst);
449
}
450

    
451
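/* MULScc, one step of the V8 multiply-step algorithm: the second operand
   is zeroed when Y<0> is clear, bit 0 of rs1 is shifted into the top of Y,
   (N xor V) is shifted into the top of rs1, and the two values are added
   with the result left in cpu_cc_dst for the condition codes. */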
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
452
{
453
    TCGv r_temp;
454
    int l1;
455

    
456
    l1 = gen_new_label();
457
    r_temp = tcg_temp_new();
458

    
459
    /* old op:
460
    if (!(env->y & 1))
461
        T1 = 0;
462
    */
463
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
464
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
465
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
466
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
467
    tcg_gen_movi_tl(cpu_cc_src2, 0);
468
    gen_set_label(l1);
469

    
470
    // b2 = T0 & 1;
471
    // env->y = (b2 << 31) | (env->y >> 1);
472
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
473
    tcg_gen_shli_tl(r_temp, r_temp, 31);
474
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
475
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
476
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
477
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
478

    
479
    // b1 = N ^ V;
480
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
481
    gen_mov_reg_V(r_temp, cpu_psr);
482
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
483
    tcg_temp_free(r_temp);
484

    
485
    // T0 = (b1 << 31) | (T0 >> 1);
486
    // src1 = T0;
487
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
488
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
489
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
490

    
491
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
492

    
493
    tcg_gen_mov_tl(dst, cpu_cc_dst);
494
}
495

    
496
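/* UMUL: 32x32 -> 64 bit unsigned multiply.  The high 32 bits of the
   product go to %y; the destination gets the full 64-bit product on
   SPARC64 and the low 32 bits otherwise.  gen_op_smul below is the
   signed counterpart. */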
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
497
{
498
    TCGv_i64 r_temp, r_temp2;
499

    
500
    r_temp = tcg_temp_new_i64();
501
    r_temp2 = tcg_temp_new_i64();
502

    
503
    tcg_gen_extu_tl_i64(r_temp, src2);
504
    tcg_gen_extu_tl_i64(r_temp2, src1);
505
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
506

    
507
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
508
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
509
    tcg_temp_free_i64(r_temp);
510
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
511
#ifdef TARGET_SPARC64
512
    tcg_gen_mov_i64(dst, r_temp2);
513
#else
514
    tcg_gen_trunc_i64_tl(dst, r_temp2);
515
#endif
516
    tcg_temp_free_i64(r_temp2);
517
}
518

    
519
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
520
{
521
    TCGv_i64 r_temp, r_temp2;
522

    
523
    r_temp = tcg_temp_new_i64();
524
    r_temp2 = tcg_temp_new_i64();
525

    
526
    tcg_gen_ext_tl_i64(r_temp, src2);
527
    tcg_gen_ext_tl_i64(r_temp2, src1);
528
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
529

    
530
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
531
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
532
    tcg_temp_free_i64(r_temp);
533
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
534
#ifdef TARGET_SPARC64
535
    tcg_gen_mov_i64(dst, r_temp2);
536
#else
537
    tcg_gen_trunc_i64_tl(dst, r_temp2);
538
#endif
539
    tcg_temp_free_i64(r_temp2);
540
}
541

    
542
#ifdef TARGET_SPARC64
543
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
544
{
545
    TCGv_i32 r_const;
546
    int l1;
547

    
548
    l1 = gen_new_label();
549
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
550
    r_const = tcg_const_i32(TT_DIV_ZERO);
551
    gen_helper_raise_exception(r_const);
552
    tcg_temp_free_i32(r_const);
553
    gen_set_label(l1);
554
}
555

    
556
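/* SDIVX: 64-bit signed division.  Division by zero traps, and the single
   overflowing case INT64_MIN / -1 is clamped to INT64_MIN instead of
   being handed to the host division. */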
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
557
{
558
    int l1, l2;
559

    
560
    l1 = gen_new_label();
561
    l2 = gen_new_label();
562
    tcg_gen_mov_tl(cpu_cc_src, src1);
563
    tcg_gen_mov_tl(cpu_cc_src2, src2);
564
    gen_trap_ifdivzero_tl(cpu_cc_src2);
565
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
566
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
567
    tcg_gen_movi_i64(dst, INT64_MIN);
568
    tcg_gen_br(l2);
569
    gen_set_label(l1);
570
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
571
    gen_set_label(l2);
572
}
573
#endif
574

    
575
// 1
576
static inline void gen_op_eval_ba(TCGv dst)
577
{
578
    tcg_gen_movi_tl(dst, 1);
579
}
580

    
581
// Z
582
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
583
{
584
    gen_mov_reg_Z(dst, src);
585
}
586

    
587
// Z | (N ^ V)
588
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
589
{
590
    gen_mov_reg_N(cpu_tmp0, src);
591
    gen_mov_reg_V(dst, src);
592
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
593
    gen_mov_reg_Z(cpu_tmp0, src);
594
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
595
}
596

    
597
// N ^ V
598
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
599
{
600
    gen_mov_reg_V(cpu_tmp0, src);
601
    gen_mov_reg_N(dst, src);
602
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
603
}
604

    
605
// C | Z
606
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
607
{
608
    gen_mov_reg_Z(cpu_tmp0, src);
609
    gen_mov_reg_C(dst, src);
610
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
611
}
612

    
613
// C
614
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
615
{
616
    gen_mov_reg_C(dst, src);
617
}
618

    
619
// V
620
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
621
{
622
    gen_mov_reg_V(dst, src);
623
}
624

    
625
// 0
626
static inline void gen_op_eval_bn(TCGv dst)
627
{
628
    tcg_gen_movi_tl(dst, 0);
629
}
630

    
631
// N
632
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
633
{
634
    gen_mov_reg_N(dst, src);
635
}
636

    
637
// !Z
638
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
639
{
640
    gen_mov_reg_Z(dst, src);
641
    tcg_gen_xori_tl(dst, dst, 0x1);
642
}
643

    
644
// !(Z | (N ^ V))
645
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
646
{
647
    gen_mov_reg_N(cpu_tmp0, src);
648
    gen_mov_reg_V(dst, src);
649
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
650
    gen_mov_reg_Z(cpu_tmp0, src);
651
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
652
    tcg_gen_xori_tl(dst, dst, 0x1);
653
}
654

    
655
// !(N ^ V)
656
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
657
{
658
    gen_mov_reg_V(cpu_tmp0, src);
659
    gen_mov_reg_N(dst, src);
660
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
661
    tcg_gen_xori_tl(dst, dst, 0x1);
662
}
663

    
664
// !(C | Z)
665
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
666
{
667
    gen_mov_reg_Z(cpu_tmp0, src);
668
    gen_mov_reg_C(dst, src);
669
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
670
    tcg_gen_xori_tl(dst, dst, 0x1);
671
}
672

    
673
// !C
674
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
675
{
676
    gen_mov_reg_C(dst, src);
677
    tcg_gen_xori_tl(dst, dst, 0x1);
678
}
679

    
680
// !N
681
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
682
{
683
    gen_mov_reg_N(dst, src);
684
    tcg_gen_xori_tl(dst, dst, 0x1);
685
}
686

    
687
// !V
688
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
689
{
690
    gen_mov_reg_V(dst, src);
691
    tcg_gen_xori_tl(dst, dst, 0x1);
692
}
693

    
694
/*
695
  FPSR bit field FCC1 | FCC0:
696
   0 =
697
   1 <
698
   2 >
699
   3 unordered
700
*/
701
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
702
                                    unsigned int fcc_offset)
703
{
704
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
705
    tcg_gen_andi_tl(reg, reg, 0x1);
706
}
707

    
708
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
709
                                    unsigned int fcc_offset)
710
{
711
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
712
    tcg_gen_andi_tl(reg, reg, 0x1);
713
}
714

    
715
// !0: FCC0 | FCC1
716
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
717
                                    unsigned int fcc_offset)
718
{
719
    gen_mov_reg_FCC0(dst, src, fcc_offset);
720
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
721
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
722
}
723

    
724
// 1 or 2: FCC0 ^ FCC1
725
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
726
                                    unsigned int fcc_offset)
727
{
728
    gen_mov_reg_FCC0(dst, src, fcc_offset);
729
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
730
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
731
}
732

    
733
// 1 or 3: FCC0
734
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
735
                                    unsigned int fcc_offset)
736
{
737
    gen_mov_reg_FCC0(dst, src, fcc_offset);
738
}
739

    
740
// 1: FCC0 & !FCC1
741
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
742
                                    unsigned int fcc_offset)
743
{
744
    gen_mov_reg_FCC0(dst, src, fcc_offset);
745
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
746
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
747
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
748
}
749

    
750
// 2 or 3: FCC1
751
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
752
                                    unsigned int fcc_offset)
753
{
754
    gen_mov_reg_FCC1(dst, src, fcc_offset);
755
}
756

    
757
// 2: !FCC0 & FCC1
758
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
759
                                    unsigned int fcc_offset)
760
{
761
    gen_mov_reg_FCC0(dst, src, fcc_offset);
762
    tcg_gen_xori_tl(dst, dst, 0x1);
763
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
764
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
765
}
766

    
767
// 3: FCC0 & FCC1
768
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
769
                                    unsigned int fcc_offset)
770
{
771
    gen_mov_reg_FCC0(dst, src, fcc_offset);
772
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
773
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
774
}
775

    
776
// 0: !(FCC0 | FCC1)
777
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
778
                                    unsigned int fcc_offset)
779
{
780
    gen_mov_reg_FCC0(dst, src, fcc_offset);
781
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
782
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
783
    tcg_gen_xori_tl(dst, dst, 0x1);
784
}
785

    
786
// 0 or 3: !(FCC0 ^ FCC1)
787
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
788
                                    unsigned int fcc_offset)
789
{
790
    gen_mov_reg_FCC0(dst, src, fcc_offset);
791
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
792
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
793
    tcg_gen_xori_tl(dst, dst, 0x1);
794
}
795

    
796
// 0 or 2: !FCC0
797
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
798
                                    unsigned int fcc_offset)
799
{
800
    gen_mov_reg_FCC0(dst, src, fcc_offset);
801
    tcg_gen_xori_tl(dst, dst, 0x1);
802
}
803

    
804
// !1: !(FCC0 & !FCC1)
805
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
806
                                    unsigned int fcc_offset)
807
{
808
    gen_mov_reg_FCC0(dst, src, fcc_offset);
809
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
810
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
811
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
812
    tcg_gen_xori_tl(dst, dst, 0x1);
813
}
814

    
815
// 0 or 1: !FCC1
816
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
817
                                    unsigned int fcc_offset)
818
{
819
    gen_mov_reg_FCC1(dst, src, fcc_offset);
820
    tcg_gen_xori_tl(dst, dst, 0x1);
821
}
822

    
823
// !2: !(!FCC0 & FCC1)
824
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
825
                                    unsigned int fcc_offset)
826
{
827
    gen_mov_reg_FCC0(dst, src, fcc_offset);
828
    tcg_gen_xori_tl(dst, dst, 0x1);
829
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
830
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
831
    tcg_gen_xori_tl(dst, dst, 0x1);
832
}
833

    
834
// !3: !(FCC0 & FCC1)
835
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
836
                                    unsigned int fcc_offset)
837
{
838
    gen_mov_reg_FCC0(dst, src, fcc_offset);
839
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
840
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
841
    tcg_gen_xori_tl(dst, dst, 0x1);
842
}
843

    
844
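/* Conditional two-way exit: continue at pc1 when r_cond is non-zero,
   at pc2 otherwise, using the two chainable goto_tb slots. */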
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
845
                               target_ulong pc2, TCGv r_cond)
846
{
847
    int l1;
848

    
849
    l1 = gen_new_label();
850

    
851
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
852

    
853
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
854

    
855
    gen_set_label(l1);
856
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
857
}
858

    
859
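/* Branch with the annul bit set: when the condition holds, the delay slot
   at pc2 executes and control then flows to pc1; when it does not, the
   delay slot is annulled and execution resumes at pc2 + 4. */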
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
860
                                target_ulong pc2, TCGv r_cond)
861
{
862
    int l1;
863

    
864
    l1 = gen_new_label();
865

    
866
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
867

    
868
    gen_goto_tb(dc, 0, pc2, pc1);
869

    
870
    gen_set_label(l1);
871
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
872
}
873

    
874
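/* Materialize a JUMP_PC style npc: select npc1 when r_cond is non-zero,
   npc2 otherwise. */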
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
875
                                      TCGv r_cond)
876
{
877
    int l1, l2;
878

    
879
    l1 = gen_new_label();
880
    l2 = gen_new_label();
881

    
882
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
883

    
884
    tcg_gen_movi_tl(cpu_npc, npc1);
885
    tcg_gen_br(l2);
886

    
887
    gen_set_label(l1);
888
    tcg_gen_movi_tl(cpu_npc, npc2);
889
    gen_set_label(l2);
890
}
891

    
892
/* call this function before using the condition register as it may
893
   have been set for a jump */
894
static inline void flush_cond(DisasContext *dc, TCGv cond)
895
{
896
    if (dc->npc == JUMP_PC) {
897
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
898
        dc->npc = DYNAMIC_PC;
899
    }
900
}
901

    
902
static inline void save_npc(DisasContext *dc, TCGv cond)
903
{
904
    if (dc->npc == JUMP_PC) {
905
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
906
        dc->npc = DYNAMIC_PC;
907
    } else if (dc->npc != DYNAMIC_PC) {
908
        tcg_gen_movi_tl(cpu_npc, dc->npc);
909
    }
910
}
911

    
912
static inline void save_state(DisasContext *dc, TCGv cond)
913
{
914
    tcg_gen_movi_tl(cpu_pc, dc->pc);
915
    /* flush pending conditional evaluations before exposing cpu state */
916
    if (dc->cc_op != CC_OP_FLAGS) {
917
        dc->cc_op = CC_OP_FLAGS;
918
        gen_helper_compute_psr();
919
    }
920
    save_npc(dc, cond);
921
}
922

    
923
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
924
{
925
    if (dc->npc == JUMP_PC) {
926
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
927
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
928
        dc->pc = DYNAMIC_PC;
929
    } else if (dc->npc == DYNAMIC_PC) {
930
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
931
        dc->pc = DYNAMIC_PC;
932
    } else {
933
        dc->pc = dc->npc;
934
    }
935
}
936

    
937
static inline void gen_op_next_insn(void)
938
{
939
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
940
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
941
}
942

    
943
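/* Evaluate one of the 16 integer branch conditions into r_dst (0 or 1),
   using icc or xcc as selected by cc.  Lazily tracked flags are first
   materialized into the PSR if cc_op is not already CC_OP_FLAGS. */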
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
944
                            DisasContext *dc)
945
{
946
    TCGv_i32 r_src;
947

    
948
#ifdef TARGET_SPARC64
949
    if (cc)
950
        r_src = cpu_xcc;
951
    else
952
        r_src = cpu_psr;
953
#else
954
    r_src = cpu_psr;
955
#endif
956
    switch (dc->cc_op) {
957
    case CC_OP_FLAGS:
958
        break;
959
    default:
960
        gen_helper_compute_psr();
961
        dc->cc_op = CC_OP_FLAGS;
962
        break;
963
    }
964
    switch (cond) {
965
    case 0x0:
966
        gen_op_eval_bn(r_dst);
967
        break;
968
    case 0x1:
969
        gen_op_eval_be(r_dst, r_src);
970
        break;
971
    case 0x2:
972
        gen_op_eval_ble(r_dst, r_src);
973
        break;
974
    case 0x3:
975
        gen_op_eval_bl(r_dst, r_src);
976
        break;
977
    case 0x4:
978
        gen_op_eval_bleu(r_dst, r_src);
979
        break;
980
    case 0x5:
981
        gen_op_eval_bcs(r_dst, r_src);
982
        break;
983
    case 0x6:
984
        gen_op_eval_bneg(r_dst, r_src);
985
        break;
986
    case 0x7:
987
        gen_op_eval_bvs(r_dst, r_src);
988
        break;
989
    case 0x8:
990
        gen_op_eval_ba(r_dst);
991
        break;
992
    case 0x9:
993
        gen_op_eval_bne(r_dst, r_src);
994
        break;
995
    case 0xa:
996
        gen_op_eval_bg(r_dst, r_src);
997
        break;
998
    case 0xb:
999
        gen_op_eval_bge(r_dst, r_src);
1000
        break;
1001
    case 0xc:
1002
        gen_op_eval_bgu(r_dst, r_src);
1003
        break;
1004
    case 0xd:
1005
        gen_op_eval_bcc(r_dst, r_src);
1006
        break;
1007
    case 0xe:
1008
        gen_op_eval_bpos(r_dst, r_src);
1009
        break;
1010
    case 0xf:
1011
        gen_op_eval_bvc(r_dst, r_src);
1012
        break;
1013
    }
1014
}
1015

    
1016
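/* Evaluate a floating-point branch condition against the %fcc field
   selected by cc; 'offset' is the bit distance from fcc0 to that field
   within the FSR. */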
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1017
{
1018
    unsigned int offset;
1019

    
1020
    switch (cc) {
1021
    default:
1022
    case 0x0:
1023
        offset = 0;
1024
        break;
1025
    case 0x1:
1026
        offset = 32 - 10;
1027
        break;
1028
    case 0x2:
1029
        offset = 34 - 10;
1030
        break;
1031
    case 0x3:
1032
        offset = 36 - 10;
1033
        break;
1034
    }
1035

    
1036
    switch (cond) {
1037
    case 0x0:
1038
        gen_op_eval_bn(r_dst);
1039
        break;
1040
    case 0x1:
1041
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1042
        break;
1043
    case 0x2:
1044
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1045
        break;
1046
    case 0x3:
1047
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1048
        break;
1049
    case 0x4:
1050
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1051
        break;
1052
    case 0x5:
1053
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1054
        break;
1055
    case 0x6:
1056
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1057
        break;
1058
    case 0x7:
1059
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1060
        break;
1061
    case 0x8:
1062
        gen_op_eval_ba(r_dst);
1063
        break;
1064
    case 0x9:
1065
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1066
        break;
1067
    case 0xa:
1068
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1069
        break;
1070
    case 0xb:
1071
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1072
        break;
1073
    case 0xc:
1074
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1075
        break;
1076
    case 0xd:
1077
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1078
        break;
1079
    case 0xe:
1080
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1081
        break;
1082
    case 0xf:
1083
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1084
        break;
1085
    }
1086
}
1087

    
1088
#ifdef TARGET_SPARC64
1089
// Inverted logic
1090
static const int gen_tcg_cond_reg[8] = {
1091
    -1,
1092
    TCG_COND_NE,
1093
    TCG_COND_GT,
1094
    TCG_COND_GE,
1095
    -1,
1096
    TCG_COND_EQ,
1097
    TCG_COND_LE,
1098
    TCG_COND_LT,
1099
};
1100

    
1101
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1102
{
1103
    int l1;
1104

    
1105
    l1 = gen_new_label();
1106
    tcg_gen_movi_tl(r_dst, 0);
1107
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1108
    tcg_gen_movi_tl(r_dst, 1);
1109
    gen_set_label(l1);
1110
}
1111
#endif
1112

    
1113
/* XXX: potentially incorrect if dynamic npc */
1114
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1115
                      TCGv r_cond)
1116
{
1117
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1118
    target_ulong target = dc->pc + offset;
1119

    
1120
    if (cond == 0x0) {
1121
        /* unconditional not taken */
1122
        if (a) {
1123
            dc->pc = dc->npc + 4;
1124
            dc->npc = dc->pc + 4;
1125
        } else {
1126
            dc->pc = dc->npc;
1127
            dc->npc = dc->pc + 4;
1128
        }
1129
    } else if (cond == 0x8) {
1130
        /* unconditional taken */
1131
        if (a) {
1132
            dc->pc = target;
1133
            dc->npc = dc->pc + 4;
1134
        } else {
1135
            dc->pc = dc->npc;
1136
            dc->npc = target;
1137
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1138
        }
1139
    } else {
1140
        flush_cond(dc, r_cond);
1141
        gen_cond(r_cond, cc, cond, dc);
1142
        if (a) {
1143
            gen_branch_a(dc, target, dc->npc, r_cond);
1144
            dc->is_br = 1;
1145
        } else {
1146
            dc->pc = dc->npc;
1147
            dc->jump_pc[0] = target;
1148
            dc->jump_pc[1] = dc->npc + 4;
1149
            dc->npc = JUMP_PC;
1150
        }
1151
    }
1152
}
1153

    
1154
/* XXX: potentially incorrect if dynamic npc */
1155
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1156
                      TCGv r_cond)
1157
{
1158
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1159
    target_ulong target = dc->pc + offset;
1160

    
1161
    if (cond == 0x0) {
1162
        /* unconditional not taken */
1163
        if (a) {
1164
            dc->pc = dc->npc + 4;
1165
            dc->npc = dc->pc + 4;
1166
        } else {
1167
            dc->pc = dc->npc;
1168
            dc->npc = dc->pc + 4;
1169
        }
1170
    } else if (cond == 0x8) {
1171
        /* unconditional taken */
1172
        if (a) {
1173
            dc->pc = target;
1174
            dc->npc = dc->pc + 4;
1175
        } else {
1176
            dc->pc = dc->npc;
1177
            dc->npc = target;
1178
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1179
        }
1180
    } else {
1181
        flush_cond(dc, r_cond);
1182
        gen_fcond(r_cond, cc, cond);
1183
        if (a) {
1184
            gen_branch_a(dc, target, dc->npc, r_cond);
1185
            dc->is_br = 1;
1186
        } else {
1187
            dc->pc = dc->npc;
1188
            dc->jump_pc[0] = target;
1189
            dc->jump_pc[1] = dc->npc + 4;
1190
            dc->npc = JUMP_PC;
1191
        }
1192
    }
1193
}
1194

    
1195
#ifdef TARGET_SPARC64
1196
/* XXX: potentially incorrect if dynamic npc */
1197
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1198
                          TCGv r_cond, TCGv r_reg)
1199
{
1200
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1201
    target_ulong target = dc->pc + offset;
1202

    
1203
    flush_cond(dc, r_cond);
1204
    gen_cond_reg(r_cond, cond, r_reg);
1205
    if (a) {
1206
        gen_branch_a(dc, target, dc->npc, r_cond);
1207
        dc->is_br = 1;
1208
    } else {
1209
        dc->pc = dc->npc;
1210
        dc->jump_pc[0] = target;
1211
        dc->jump_pc[1] = dc->npc + 4;
1212
        dc->npc = JUMP_PC;
1213
    }
1214
}
1215

    
1216
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1217
{
1218
    switch (fccno) {
1219
    case 0:
1220
        gen_helper_fcmps(r_rs1, r_rs2);
1221
        break;
1222
    case 1:
1223
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1224
        break;
1225
    case 2:
1226
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1227
        break;
1228
    case 3:
1229
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1230
        break;
1231
    }
1232
}
1233

    
1234
static inline void gen_op_fcmpd(int fccno)
1235
{
1236
    switch (fccno) {
1237
    case 0:
1238
        gen_helper_fcmpd();
1239
        break;
1240
    case 1:
1241
        gen_helper_fcmpd_fcc1();
1242
        break;
1243
    case 2:
1244
        gen_helper_fcmpd_fcc2();
1245
        break;
1246
    case 3:
1247
        gen_helper_fcmpd_fcc3();
1248
        break;
1249
    }
1250
}
1251

    
1252
static inline void gen_op_fcmpq(int fccno)
1253
{
1254
    switch (fccno) {
1255
    case 0:
1256
        gen_helper_fcmpq();
1257
        break;
1258
    case 1:
1259
        gen_helper_fcmpq_fcc1();
1260
        break;
1261
    case 2:
1262
        gen_helper_fcmpq_fcc2();
1263
        break;
1264
    case 3:
1265
        gen_helper_fcmpq_fcc3();
1266
        break;
1267
    }
1268
}
1269

    
1270
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1271
{
1272
    switch (fccno) {
1273
    case 0:
1274
        gen_helper_fcmpes(r_rs1, r_rs2);
1275
        break;
1276
    case 1:
1277
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1278
        break;
1279
    case 2:
1280
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1281
        break;
1282
    case 3:
1283
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1284
        break;
1285
    }
1286
}
1287

    
1288
static inline void gen_op_fcmped(int fccno)
1289
{
1290
    switch (fccno) {
1291
    case 0:
1292
        gen_helper_fcmped();
1293
        break;
1294
    case 1:
1295
        gen_helper_fcmped_fcc1();
1296
        break;
1297
    case 2:
1298
        gen_helper_fcmped_fcc2();
1299
        break;
1300
    case 3:
1301
        gen_helper_fcmped_fcc3();
1302
        break;
1303
    }
1304
}
1305

    
1306
static inline void gen_op_fcmpeq(int fccno)
1307
{
1308
    switch (fccno) {
1309
    case 0:
1310
        gen_helper_fcmpeq();
1311
        break;
1312
    case 1:
1313
        gen_helper_fcmpeq_fcc1();
1314
        break;
1315
    case 2:
1316
        gen_helper_fcmpeq_fcc2();
1317
        break;
1318
    case 3:
1319
        gen_helper_fcmpeq_fcc3();
1320
        break;
1321
    }
1322
}
1323

    
1324
#else
1325

    
1326
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1327
{
1328
    gen_helper_fcmps(r_rs1, r_rs2);
1329
}
1330

    
1331
static inline void gen_op_fcmpd(int fccno)
1332
{
1333
    gen_helper_fcmpd();
1334
}
1335

    
1336
static inline void gen_op_fcmpq(int fccno)
1337
{
1338
    gen_helper_fcmpq();
1339
}
1340

    
1341
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1342
{
1343
    gen_helper_fcmpes(r_rs1, r_rs2);
1344
}
1345

    
1346
static inline void gen_op_fcmped(int fccno)
1347
{
1348
    gen_helper_fcmped();
1349
}
1350

    
1351
static inline void gen_op_fcmpeq(int fccno)
1352
{
1353
    gen_helper_fcmpeq();
1354
}
1355
#endif
1356

    
1357
static inline void gen_op_fpexception_im(int fsr_flags)
1358
{
1359
    TCGv_i32 r_const;
1360

    
1361
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1362
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1363
    r_const = tcg_const_i32(TT_FP_EXCP);
1364
    gen_helper_raise_exception(r_const);
1365
    tcg_temp_free_i32(r_const);
1366
}
1367

    
1368
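/* If the FPU is disabled, raise TT_NFPU_INSN, end the TB and return 1 so
   the caller skips the rest of the instruction.  User-mode emulation
   always reports the FPU as usable. */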
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1369
{
1370
#if !defined(CONFIG_USER_ONLY)
1371
    if (!dc->fpu_enabled) {
1372
        TCGv_i32 r_const;
1373

    
1374
        save_state(dc, r_cond);
1375
        r_const = tcg_const_i32(TT_NFPU_INSN);
1376
        gen_helper_raise_exception(r_const);
1377
        tcg_temp_free_i32(r_const);
1378
        dc->is_br = 1;
1379
        return 1;
1380
    }
1381
#endif
1382
    return 0;
1383
}
1384

    
1385
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1386
{
1387
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1388
}
1389

    
1390
static inline void gen_clear_float_exceptions(void)
1391
{
1392
    gen_helper_clear_float_exceptions();
1393
}
1394

    
1395
/* asi moves */
1396
#ifdef TARGET_SPARC64
1397
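/* Fetch the ASI for an alternate-space access: the %asi register for the
   i=1 (register + immediate) form, otherwise the 8-bit ASI field encoded
   in the instruction. */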
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1398
{
1399
    int asi;
1400
    TCGv_i32 r_asi;
1401

    
1402
    if (IS_IMM) {
1403
        r_asi = tcg_temp_new_i32();
1404
        tcg_gen_mov_i32(r_asi, cpu_asi);
1405
    } else {
1406
        asi = GET_FIELD(insn, 19, 26);
1407
        r_asi = tcg_const_i32(asi);
1408
    }
1409
    return r_asi;
1410
}
1411

    
1412
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1413
                              int sign)
1414
{
1415
    TCGv_i32 r_asi, r_size, r_sign;
1416

    
1417
    r_asi = gen_get_asi(insn, addr);
1418
    r_size = tcg_const_i32(size);
1419
    r_sign = tcg_const_i32(sign);
1420
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1421
    tcg_temp_free_i32(r_sign);
1422
    tcg_temp_free_i32(r_size);
1423
    tcg_temp_free_i32(r_asi);
1424
}
1425

    
1426
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1427
{
1428
    TCGv_i32 r_asi, r_size;
1429

    
1430
    r_asi = gen_get_asi(insn, addr);
1431
    r_size = tcg_const_i32(size);
1432
    gen_helper_st_asi(addr, src, r_asi, r_size);
1433
    tcg_temp_free_i32(r_size);
1434
    tcg_temp_free_i32(r_asi);
1435
}
1436

    
1437
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1438
{
1439
    TCGv_i32 r_asi, r_size, r_rd;
1440

    
1441
    r_asi = gen_get_asi(insn, addr);
1442
    r_size = tcg_const_i32(size);
1443
    r_rd = tcg_const_i32(rd);
1444
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1445
    tcg_temp_free_i32(r_rd);
1446
    tcg_temp_free_i32(r_size);
1447
    tcg_temp_free_i32(r_asi);
1448
}
1449

    
1450
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1451
{
1452
    TCGv_i32 r_asi, r_size, r_rd;
1453

    
1454
    r_asi = gen_get_asi(insn, addr);
1455
    r_size = tcg_const_i32(size);
1456
    r_rd = tcg_const_i32(rd);
1457
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1458
    tcg_temp_free_i32(r_rd);
1459
    tcg_temp_free_i32(r_size);
1460
    tcg_temp_free_i32(r_asi);
1461
}
1462

    
1463
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1464
{
1465
    TCGv_i32 r_asi, r_size, r_sign;
1466

    
1467
    r_asi = gen_get_asi(insn, addr);
1468
    r_size = tcg_const_i32(4);
1469
    r_sign = tcg_const_i32(0);
1470
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1471
    tcg_temp_free_i32(r_sign);
1472
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1473
    tcg_temp_free_i32(r_size);
1474
    tcg_temp_free_i32(r_asi);
1475
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1476
}
1477

    
1478
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1479
{
1480
    TCGv_i32 r_asi, r_rd;
1481

    
1482
    r_asi = gen_get_asi(insn, addr);
1483
    r_rd = tcg_const_i32(rd);
1484
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1485
    tcg_temp_free_i32(r_rd);
1486
    tcg_temp_free_i32(r_asi);
1487
}
1488

    
1489
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1490
{
1491
    TCGv_i32 r_asi, r_size;
1492

    
1493
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1494
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1495
    r_asi = gen_get_asi(insn, addr);
1496
    r_size = tcg_const_i32(8);
1497
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1498
    tcg_temp_free_i32(r_size);
1499
    tcg_temp_free_i32(r_asi);
1500
}
1501

    
1502
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1503
                               int rd)
1504
{
1505
    TCGv r_val1;
1506
    TCGv_i32 r_asi;
1507

    
1508
    r_val1 = tcg_temp_new();
1509
    gen_movl_reg_TN(rd, r_val1);
1510
    r_asi = gen_get_asi(insn, addr);
1511
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1512
    tcg_temp_free_i32(r_asi);
1513
    tcg_temp_free(r_val1);
1514
}
1515

    
1516
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1517
                                int rd)
1518
{
1519
    TCGv_i32 r_asi;
1520

    
1521
    gen_movl_reg_TN(rd, cpu_tmp64);
1522
    r_asi = gen_get_asi(insn, addr);
1523
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1524
    tcg_temp_free_i32(r_asi);
1525
}
1526

    
1527
#elif !defined(CONFIG_USER_ONLY)
1528

    
1529
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1530
                              int sign)
1531
{
1532
    TCGv_i32 r_asi, r_size, r_sign;
1533

    
1534
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1535
    r_size = tcg_const_i32(size);
1536
    r_sign = tcg_const_i32(sign);
1537
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1538
    tcg_temp_free(r_sign);
1539
    tcg_temp_free(r_size);
1540
    tcg_temp_free(r_asi);
1541
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1542
}
1543

    
1544
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1545
{
1546
    TCGv_i32 r_asi, r_size;
1547

    
1548
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1549
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1550
    r_size = tcg_const_i32(size);
1551
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1552
    tcg_temp_free(r_size);
1553
    tcg_temp_free(r_asi);
1554
}
1555

    
1556
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1557
{
1558
    TCGv_i32 r_asi, r_size, r_sign;
1559
    TCGv_i64 r_val;
1560

    
1561
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1562
    r_size = tcg_const_i32(4);
1563
    r_sign = tcg_const_i32(0);
1564
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1565
    tcg_temp_free(r_sign);
1566
    r_val = tcg_temp_new_i64();
1567
    tcg_gen_extu_tl_i64(r_val, dst);
1568
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1569
    tcg_temp_free_i64(r_val);
1570
    tcg_temp_free(r_size);
1571
    tcg_temp_free(r_asi);
1572
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1573
}
1574

    
1575
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1576
{
1577
    TCGv_i32 r_asi, r_size, r_sign;
1578

    
1579
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1580
    r_size = tcg_const_i32(8);
1581
    r_sign = tcg_const_i32(0);
1582
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1583
    tcg_temp_free(r_sign);
1584
    tcg_temp_free(r_size);
1585
    tcg_temp_free(r_asi);
1586
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1587
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1588
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1589
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1590
    gen_movl_TN_reg(rd, hi);
1591
}
1592

    
1593
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1594
{
1595
    TCGv_i32 r_asi, r_size;
1596

    
1597
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1598
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1599
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1600
    r_size = tcg_const_i32(8);
1601
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1602
    tcg_temp_free(r_size);
1603
    tcg_temp_free(r_asi);
1604
}
1605
#endif
1606

    
1607
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1608
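/* LDSTUB(A): load the addressed byte through the ASI helpers, then store
   0xff back to the same location. */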
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1609
{
1610
    TCGv_i64 r_val;
1611
    TCGv_i32 r_asi, r_size;
1612

    
1613
    gen_ld_asi(dst, addr, insn, 1, 0);
1614

    
1615
    r_val = tcg_const_i64(0xffULL);
1616
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1617
    r_size = tcg_const_i32(1);
1618
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1619
    tcg_temp_free_i32(r_size);
1620
    tcg_temp_free_i32(r_asi);
1621
    tcg_temp_free_i64(r_val);
1622
}
1623
#endif
1624

    
1625
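/* Fetch the rs1 operand: %g0 becomes a constant zero, %g1-%g7 are used
   directly as TCG globals, and window registers are loaded into the
   supplied temporary.  get_src2 below also handles the simm13 form. */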
static inline TCGv get_src1(unsigned int insn, TCGv def)
1626
{
1627
    TCGv r_rs1 = def;
1628
    unsigned int rs1;
1629

    
1630
    rs1 = GET_FIELD(insn, 13, 17);
1631
    if (rs1 == 0)
1632
        r_rs1 = tcg_const_tl(0); // XXX how to free?
1633
    else if (rs1 < 8)
1634
        r_rs1 = cpu_gregs[rs1];
1635
    else
1636
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1637
    return r_rs1;
1638
}
1639

    
1640
static inline TCGv get_src2(unsigned int insn, TCGv def)
1641
{
1642
    TCGv r_rs2 = def;
1643

    
1644
    if (IS_IMM) { /* immediate */
1645
        target_long simm;
1646

    
1647
        simm = GET_FIELDs(insn, 19, 31);
1648
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
1649
    } else { /* register */
1650
        unsigned int rs2;
1651

    
1652
        rs2 = GET_FIELD(insn, 27, 31);
1653
        if (rs2 == 0)
1654
            r_rs2 = tcg_const_tl(0); // XXX how to free?
1655
        else if (rs2 < 8)
1656
            r_rs2 = cpu_gregs[rs2];
1657
        else
1658
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1659
    }
1660
    return r_rs2;
1661
}
1662

    
1663
#ifdef TARGET_SPARC64
1664
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1665
{
1666
    TCGv_i32 r_tl = tcg_temp_new_i32();
1667

    
1668
    /* load env->tl into r_tl */
1669
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1670

    
1671
    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1672
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1673

    
1674
    /* calculate offset to current trap state from env->ts, reuse r_tl */
1675
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1676
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1677

    
1678
    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1679
    {
1680
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1681
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1682
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1683
        tcg_temp_free_ptr(r_tl_tmp);
1684
    }
1685

    
1686
    tcg_temp_free_i32(r_tl);
1687
}
1688
#endif
1689

    
1690
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
1691
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1692
        goto illegal_insn;
1693
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1694
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1695
        goto nfpu_insn;
1696

    
1697
/* before an instruction, dc->pc must be static */
1698
static void disas_sparc_insn(DisasContext * dc)
1699
{
1700
    unsigned int insn, opc, rs1, rs2, rd;
1701
    target_long simm;
1702

    
1703
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1704
        tcg_gen_debug_insn_start(dc->pc);
1705
    insn = ldl_code(dc->pc);
1706
    opc = GET_FIELD(insn, 0, 1);
1707

    
1708
    rd = GET_FIELD(insn, 2, 6);
1709

    
1710
    cpu_src1 = tcg_temp_new(); // const
1711
    cpu_src2 = tcg_temp_new(); // const
1712

    
1713
    switch (opc) {
1714
    case 0:                     /* branches/sethi */
1715
        {
1716
            unsigned int xop = GET_FIELD(insn, 7, 9);
1717
            int32_t target;
1718
            switch (xop) {
1719
#ifdef TARGET_SPARC64
1720
            case 0x1:           /* V9 BPcc */
1721
                {
1722
                    int cc;
1723

    
1724
                    target = GET_FIELD_SP(insn, 0, 18);
1725
                    target = sign_extend(target, 19);
1726
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
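            /* xop is the 6-bit op3 field; 0x3a is Tcc (trap on condition):
               the trap number is rs1 plus rs2 or the immediate, masked and
               offset by TT_TRAP below. */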
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
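                /* cond 0x8 is "trap always"; any other non-zero cond traps
                   only when the selected icc/xcc condition evaluates true. */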
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
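                /* rd %y / rd %asr: rs1 picks the register; 0 is %y, the
                   remaining values are ancillary state registers on V9. */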
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
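                /* xop now holds the 9-bit opf field selecting the FP
                   operation; double- and quad-precision operands go through
                   the DT0/DT1 and QT0/QT1 staging registers. */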
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
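                /* FPop2: FP compares and conditional moves (fmovr on an
                   integer register condition, fmovcc on %fcc/%icc/%xcc). */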
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
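                /* Bit 12 is the V9 "x" bit: when set, the full 6-bit shift
                   count is used (sllx), otherwise only the low 5 bits. */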
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
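                    /* op3 values below 0x20 are the basic ALU ops; bit 4
                       selects the cc-setting form, so it is masked off here
                       and each case tests (xop & 0x10) to update the
                       condition codes. */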
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
2937
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2938
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2939
                        }
2940
                        if (xop & 0x10) {
2941
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2942
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2943
                            dc->cc_op = CC_OP_LOGIC;
2944
                        }
2945
                        break;
2946
                    case 0x8: /* addx, V9 addc */
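                        /* The carry-in comes from the PSR carry bit, so the
                           lazily evaluated condition codes must be brought
                           up to date (gen_helper_compute_psr) before it is
                           read. */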
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                                dc->cc_op = CC_OP_ADDX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                                dc->cc_op = CC_OP_ADDX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                                dc->cc_op = CC_OP_SUBX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                                dc->cc_op = CC_OP_SUBX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                            dc->cc_op = CC_OP_DIV;
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                            dc->cc_op = CC_OP_DIV;
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
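                    /* xop 0x20 and up: tagged arithmetic, mulscc, the
                       pre-V9 shifts, writes to state registers, and the
                       V9-only movcc/sdivx/popc/movr. */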
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
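                        /* The trap-on-overflow forms may raise an exception
                           inside the helper, so save the translation state
                           first. */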
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr();
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
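                            /* wr %rs1, reg_or_imm, %asr: the value written
                               is rs1 XOR reg_or_imm; rd selects the target
                               ancillary state register. */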
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc))
                                    ; // XXX
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
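                            /* V9 wrpr: rd selects which privileged register
                               receives the value computed above. */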
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(cpu_tmp0);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 7: // tl
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
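                        /* Conditional move: evaluate the integer or FP
                           condition, then branch over the register write
                           when it is false. */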
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                              cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

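                /* VIS operations: double-precision operands are even/odd
                   pairs of 32-bit FP registers selected with DFPREG();
                   most helpers work on the DT0/DT1 temporaries loaded
                   below. */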
                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
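                /* The VIS logical and move operations below are expanded
                   inline with TCG ops on the individual 32-bit FP
                   registers instead of going through helpers. */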
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
3968
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3969
                    break;
3970
                case 0x07e: /* VIS I fone */
3971
                    CHECK_FPU_FEATURE(dc, VIS1);
3972
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3973
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3974
                    break;
3975
                case 0x07f: /* VIS I fones */
3976
                    CHECK_FPU_FEATURE(dc, VIS1);
3977
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
3978
                    break;
3979
                case 0x080: /* VIS I shutdown */
3980
                case 0x081: /* VIS II siam */
3981
                    // XXX
3982
                    goto illegal_insn;
3983
                default:
3984
                    goto illegal_insn;
3985
                }
3986
#else
3987
                goto ncp_insn;
3988
#endif
3989
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
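            /* V9 return: restore the caller's register window, then jump to
               rs1 + (simm13 | rs2); the target is checked for 4-byte
               alignment before being installed as the new npc. */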
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
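                /* cpu_dst = rs1 + (simm13 | rs2): the jump target for
                   jmpl/rett, the address argument for flush, and the value
                   written back to rd by save/restore. */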
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
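            /* cpu_addr now holds the effective address rs1 + (simm13 | rs2);
               for casa/casxa the address is rs1 alone and rs2 has been kept
               in cpu_src2 as the comparison value. */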
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
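                        /* big-endian 64-bit load: the word at [addr] is in
                           the upper half (goes to the even register rd), the
                           word at [addr + 4] in the lower half (rd + 1) */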
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomic */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomic */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
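                /* alternate space loads: on 32-bit SPARC these are privileged
                   and only the register-ASI form is valid, hence the IS_IMM
                   and supervisor() checks below */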
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomic */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomic */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
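                /* FP loads: the memory accesses and helpers below can fault,
                   so pc/npc were saved above */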
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
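                /* casa/casxa: compare the word/xword at [rs1] with r[rs2];
                   if equal, swap it with r[rd]; rd always receives the old
                   memory value */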
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non-jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}

static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
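    /* translate one instruction per iteration; the loop stops on a branch,
       a non-sequential PC, a page boundary, single-step mode, a full opcode
       buffer, or after max_insns instructions */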
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
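    /* gen_opc_npc[] stores the sentinel 1 for DYNAMIC_PC and 2 for JUMP_PC */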
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}