
target-sparc/translate.c @ d7da2a10


1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "exec-all.h"
29
#include "disas.h"
30
#include "helper.h"
31
#include "tcg-op.h"
32

    
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
#define DEBUG_DISAS
37

    
38
#define DYNAMIC_PC  1 /* dynamic pc value */
39
#define JUMP_PC     2 /* dynamic pc value which takes only two values
40
                         according to jump_pc[0] or jump_pc[1] */
41

    
42
/* global register indexes */
43
static TCGv_ptr cpu_env, cpu_regwptr;
44
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45
static TCGv_i32 cpu_cc_op;
46
static TCGv_i32 cpu_psr;
47
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48
static TCGv cpu_y;
49
#ifndef CONFIG_USER_ONLY
50
static TCGv cpu_tbr;
51
#endif
52
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53
#ifdef TARGET_SPARC64
54
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55
static TCGv cpu_gsr;
56
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58
static TCGv_i32 cpu_softint;
59
#else
60
static TCGv cpu_wim;
61
#endif
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64
static TCGv_i32 cpu_tmp32;
65
static TCGv_i64 cpu_tmp64;
66
/* Floating point registers */
67
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68

    
69
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70
static target_ulong gen_opc_jump_pc[2];
71

    
72
#include "gen-icount.h"
73

    
74
typedef struct DisasContext {
75
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
76
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78
    int is_br;
79
    int mem_idx;
80
    int fpu_enabled;
81
    int address_mask_32bit;
82
    uint32_t cc_op;  /* current CC operation */
83
    struct TranslationBlock *tb;
84
    sparc_def_t *def;
85
} DisasContext;
86

    
87
// This macro uses non-native bit order: bit position 0 is the MSB (2^31)
88
#define GET_FIELD(X, FROM, TO)                                  \
89
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90

    
91
// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0
92
#define GET_FIELD_SP(X, FROM, TO)               \
93
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
94

    
95
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
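// Example: GET_FIELD(insn, 2, 6) extracts the 5-bit rd field from
// instruction bits 29..25, while GET_FIELD_SP(insn, 0, 18) extracts the
// 19-bit disp19 field counting upwards from bit 0.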
97

    
98
#ifdef TARGET_SPARC64
99
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101
#else
102
#define DFPREG(r) (r & 0x1e)
103
#define QFPREG(r) (r & 0x1c)
104
#endif
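// DFPREG/QFPREG turn the 5-bit register field into the index of the first
// 32-bit half of a double/quad register.  On V9 the low bit of the field
// selects the upper bank, e.g. a field value of 1 names %f32 (index 32).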
105

    
106
#define UA2005_HTRAP_MASK 0xff
107
#define V8_TRAP_MASK 0x7f
108

    
109
static int sign_extend(int x, int len)
110
{
111
    len = 32 - len;
112
    return (x << len) >> len;
113
}
114

    
115
#define IS_IMM (insn & (1<<13))
116

    
117
/* floating point registers moves */
118
static void gen_op_load_fpr_DT0(unsigned int src)
119
{
120
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
121
                   offsetof(CPU_DoubleU, l.upper));
122
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
123
                   offsetof(CPU_DoubleU, l.lower));
124
}
125

    
126
static void gen_op_load_fpr_DT1(unsigned int src)
127
{
128
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
129
                   offsetof(CPU_DoubleU, l.upper));
130
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
131
                   offsetof(CPU_DoubleU, l.lower));
132
}
133

    
134
static void gen_op_store_DT0_fpr(unsigned int dst)
135
{
136
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
137
                   offsetof(CPU_DoubleU, l.upper));
138
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
139
                   offsetof(CPU_DoubleU, l.lower));
140
}
141

    
142
static void gen_op_load_fpr_QT0(unsigned int src)
143
{
144
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
145
                   offsetof(CPU_QuadU, l.upmost));
146
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
147
                   offsetof(CPU_QuadU, l.upper));
148
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
149
                   offsetof(CPU_QuadU, l.lower));
150
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
151
                   offsetof(CPU_QuadU, l.lowest));
152
}
153

    
154
static void gen_op_load_fpr_QT1(unsigned int src)
155
{
156
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
157
                   offsetof(CPU_QuadU, l.upmost));
158
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
159
                   offsetof(CPU_QuadU, l.upper));
160
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
161
                   offsetof(CPU_QuadU, l.lower));
162
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
163
                   offsetof(CPU_QuadU, l.lowest));
164
}
165

    
166
static void gen_op_store_QT0_fpr(unsigned int dst)
167
{
168
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
169
                   offsetof(CPU_QuadU, l.upmost));
170
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
171
                   offsetof(CPU_QuadU, l.upper));
172
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
173
                   offsetof(CPU_QuadU, l.lower));
174
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
175
                   offsetof(CPU_QuadU, l.lowest));
176
}
177

    
178
/* moves */
179
#ifdef CONFIG_USER_ONLY
180
#define supervisor(dc) 0
181
#ifdef TARGET_SPARC64
182
#define hypervisor(dc) 0
183
#endif
184
#else
185
#define supervisor(dc) (dc->mem_idx >= 1)
186
#ifdef TARGET_SPARC64
187
#define hypervisor(dc) (dc->mem_idx == 2)
188
#else
189
#endif
190
#endif
191

    
192
#ifdef TARGET_SPARC64
193
#ifndef TARGET_ABI32
194
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
195
#else
196
#define AM_CHECK(dc) (1)
197
#endif
198
#endif
199

    
200
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
201
{
202
#ifdef TARGET_SPARC64
203
    if (AM_CHECK(dc))
204
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205
#endif
206
}
207

    
208
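// %g0 reads as zero and discards writes; %g1..%g7 live in TCG globals,
// while the current window's registers are accessed through cpu_regwptr.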
static inline void gen_movl_reg_TN(int reg, TCGv tn)
209
{
210
    if (reg == 0)
211
        tcg_gen_movi_tl(tn, 0);
212
    else if (reg < 8)
213
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
214
    else {
215
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216
    }
217
}
218

    
219
static inline void gen_movl_TN_reg(int reg, TCGv tn)
220
{
221
    if (reg == 0)
222
        return;
223
    else if (reg < 8)
224
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
225
    else {
226
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227
    }
228
}
229

    
230
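// Chain to the next TB with a direct, patchable jump when both pc and npc
// stay on the same guest page as this TB; otherwise just set pc/npc and
// exit so the new target is looked up at run time.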
static inline void gen_goto_tb(DisasContext *s, int tb_num,
231
                               target_ulong pc, target_ulong npc)
232
{
233
    TranslationBlock *tb;
234

    
235
    tb = s->tb;
236
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
237
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
238
        /* jump to same page: we can use a direct jump */
239
        tcg_gen_goto_tb(tb_num);
240
        tcg_gen_movi_tl(cpu_pc, pc);
241
        tcg_gen_movi_tl(cpu_npc, npc);
242
        tcg_gen_exit_tb((long)tb + tb_num);
243
    } else {
244
        /* jump to another page: currently not optimized */
245
        tcg_gen_movi_tl(cpu_pc, pc);
246
        tcg_gen_movi_tl(cpu_npc, npc);
247
        tcg_gen_exit_tb(0);
248
    }
249
}
250

    
251
// XXX suboptimal
252
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
253
{
254
    tcg_gen_extu_i32_tl(reg, src);
255
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
256
    tcg_gen_andi_tl(reg, reg, 0x1);
257
}
258

    
259
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
260
{
261
    tcg_gen_extu_i32_tl(reg, src);
262
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
263
    tcg_gen_andi_tl(reg, reg, 0x1);
264
}
265

    
266
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
267
{
268
    tcg_gen_extu_i32_tl(reg, src);
269
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
270
    tcg_gen_andi_tl(reg, reg, 0x1);
271
}
272

    
273
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
274
{
275
    tcg_gen_extu_i32_tl(reg, src);
276
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
277
    tcg_gen_andi_tl(reg, reg, 0x1);
278
}
279

    
280
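// Trap on signed overflow of a tagged add: overflow occurred iff the
// operands have equal signs and the result's sign differs, i.e.
// (~(src1 ^ src2) & (src1 ^ dst)) has bit 31 set.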
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
281
{
282
    TCGv r_temp;
283
    TCGv_i32 r_const;
284
    int l1;
285

    
286
    l1 = gen_new_label();
287

    
288
    r_temp = tcg_temp_new();
289
    tcg_gen_xor_tl(r_temp, src1, src2);
290
    tcg_gen_not_tl(r_temp, r_temp);
291
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
292
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
293
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
294
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
295
    r_const = tcg_const_i32(TT_TOVF);
296
    gen_helper_raise_exception(r_const);
297
    tcg_temp_free_i32(r_const);
298
    gen_set_label(l1);
299
    tcg_temp_free(r_temp);
300
}
301

    
302
static inline void gen_tag_tv(TCGv src1, TCGv src2)
303
{
304
    int l1;
305
    TCGv_i32 r_const;
306

    
307
    l1 = gen_new_label();
308
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
309
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
310
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
311
    r_const = tcg_const_i32(TT_TOVF);
312
    gen_helper_raise_exception(r_const);
313
    tcg_temp_free_i32(r_const);
314
    gen_set_label(l1);
315
}
316

    
317
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
318
{
319
    tcg_gen_mov_tl(cpu_cc_src, src1);
320
    tcg_gen_movi_tl(cpu_cc_src2, src2);
321
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
322
    tcg_gen_mov_tl(dst, cpu_cc_dst);
323
}
324

    
325
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
326
{
327
    tcg_gen_mov_tl(cpu_cc_src, src1);
328
    tcg_gen_mov_tl(cpu_cc_src2, src2);
329
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
330
    tcg_gen_mov_tl(dst, cpu_cc_dst);
331
}
332

    
333
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
334
{
335
    tcg_gen_mov_tl(cpu_cc_src, src1);
336
    tcg_gen_movi_tl(cpu_cc_src2, src2);
337
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
338
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
339
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
340
    tcg_gen_mov_tl(dst, cpu_cc_dst);
341
}
342

    
343
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
344
{
345
    tcg_gen_mov_tl(cpu_cc_src, src1);
346
    tcg_gen_mov_tl(cpu_cc_src2, src2);
347
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
348
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
349
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
350
    tcg_gen_mov_tl(dst, cpu_cc_dst);
351
}
352

    
353
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
354
{
355
    tcg_gen_mov_tl(cpu_cc_src, src1);
356
    tcg_gen_mov_tl(cpu_cc_src2, src2);
357
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
358
    tcg_gen_mov_tl(dst, cpu_cc_dst);
359
}
360

    
361
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
362
{
363
    tcg_gen_mov_tl(cpu_cc_src, src1);
364
    tcg_gen_mov_tl(cpu_cc_src2, src2);
365
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
366
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
367
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
368
    tcg_gen_mov_tl(dst, cpu_cc_dst);
369
}
370

    
371
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
372
{
373
    TCGv r_temp;
374
    TCGv_i32 r_const;
375
    int l1;
376

    
377
    l1 = gen_new_label();
378

    
379
    r_temp = tcg_temp_new();
380
    tcg_gen_xor_tl(r_temp, src1, src2);
381
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
382
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
383
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
384
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
385
    r_const = tcg_const_i32(TT_TOVF);
386
    gen_helper_raise_exception(r_const);
387
    tcg_temp_free_i32(r_const);
388
    gen_set_label(l1);
389
    tcg_temp_free(r_temp);
390
}
391

    
392
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
393
{
394
    tcg_gen_mov_tl(cpu_cc_src, src1);
395
    tcg_gen_movi_tl(cpu_cc_src2, src2);
396
    if (src2 == 0) {
397
        tcg_gen_mov_tl(cpu_cc_dst, src1);
398
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
399
        dc->cc_op = CC_OP_LOGIC;
400
    } else {
401
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
402
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
403
        dc->cc_op = CC_OP_SUB;
404
    }
405
    tcg_gen_mov_tl(dst, cpu_cc_dst);
406
}
407

    
408
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
409
{
410
    tcg_gen_mov_tl(cpu_cc_src, src1);
411
    tcg_gen_mov_tl(cpu_cc_src2, src2);
412
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
413
    tcg_gen_mov_tl(dst, cpu_cc_dst);
414
}
415

    
416
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
417
{
418
    tcg_gen_mov_tl(cpu_cc_src, src1);
419
    tcg_gen_movi_tl(cpu_cc_src2, src2);
420
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
421
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
422
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
423
    tcg_gen_mov_tl(dst, cpu_cc_dst);
424
}
425

    
426
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
427
{
428
    tcg_gen_mov_tl(cpu_cc_src, src1);
429
    tcg_gen_mov_tl(cpu_cc_src2, src2);
430
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
431
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
432
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
433
    tcg_gen_mov_tl(dst, cpu_cc_dst);
434
}
435

    
436
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
437
{
438
    tcg_gen_mov_tl(cpu_cc_src, src1);
439
    tcg_gen_mov_tl(cpu_cc_src2, src2);
440
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
441
    tcg_gen_mov_tl(dst, cpu_cc_dst);
442
}
443

    
444
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
445
{
446
    tcg_gen_mov_tl(cpu_cc_src, src1);
447
    tcg_gen_mov_tl(cpu_cc_src2, src2);
448
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
449
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
450
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
451
    tcg_gen_mov_tl(dst, cpu_cc_dst);
452
}
453

    
454
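// MULScc: one step of a 32-bit multiply.  If Y's LSB is clear the addend
// (src2) is zeroed, Y is shifted right with src1's LSB inserted at the top,
// and src1 is shifted right with (N ^ V) inserted at the top before the add.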
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
455
{
456
    TCGv r_temp;
457
    int l1;
458

    
459
    l1 = gen_new_label();
460
    r_temp = tcg_temp_new();
461

    
462
    /* old op:
463
    if (!(env->y & 1))
464
        T1 = 0;
465
    */
466
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
467
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
468
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
469
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
470
    tcg_gen_movi_tl(cpu_cc_src2, 0);
471
    gen_set_label(l1);
472

    
473
    // b2 = T0 & 1;
474
    // env->y = (b2 << 31) | (env->y >> 1);
475
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
476
    tcg_gen_shli_tl(r_temp, r_temp, 31);
477
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
478
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
479
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
480
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
481

    
482
    // b1 = N ^ V;
483
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
484
    gen_mov_reg_V(r_temp, cpu_psr);
485
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
486
    tcg_temp_free(r_temp);
487

    
488
    // T0 = (b1 << 31) | (T0 >> 1);
489
    // src1 = T0;
490
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
491
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
492
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
493

    
494
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
495

    
496
    tcg_gen_mov_tl(dst, cpu_cc_dst);
497
}
498

    
499
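// UMUL/SMUL: full 32x32->64 product; the high 32 bits go to %y and the
// destination gets the low 32 bits (the whole 64-bit product on SPARC64).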
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
500
{
501
    TCGv_i64 r_temp, r_temp2;
502

    
503
    r_temp = tcg_temp_new_i64();
504
    r_temp2 = tcg_temp_new_i64();
505

    
506
    tcg_gen_extu_tl_i64(r_temp, src2);
507
    tcg_gen_extu_tl_i64(r_temp2, src1);
508
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
509

    
510
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
511
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
512
    tcg_temp_free_i64(r_temp);
513
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
514
#ifdef TARGET_SPARC64
515
    tcg_gen_mov_i64(dst, r_temp2);
516
#else
517
    tcg_gen_trunc_i64_tl(dst, r_temp2);
518
#endif
519
    tcg_temp_free_i64(r_temp2);
520
}
521

    
522
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
523
{
524
    TCGv_i64 r_temp, r_temp2;
525

    
526
    r_temp = tcg_temp_new_i64();
527
    r_temp2 = tcg_temp_new_i64();
528

    
529
    tcg_gen_ext_tl_i64(r_temp, src2);
530
    tcg_gen_ext_tl_i64(r_temp2, src1);
531
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
532

    
533
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
534
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
535
    tcg_temp_free_i64(r_temp);
536
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
537
#ifdef TARGET_SPARC64
538
    tcg_gen_mov_i64(dst, r_temp2);
539
#else
540
    tcg_gen_trunc_i64_tl(dst, r_temp2);
541
#endif
542
    tcg_temp_free_i64(r_temp2);
543
}
544

    
545
#ifdef TARGET_SPARC64
546
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
547
{
548
    TCGv_i32 r_const;
549
    int l1;
550

    
551
    l1 = gen_new_label();
552
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
553
    r_const = tcg_const_i32(TT_DIV_ZERO);
554
    gen_helper_raise_exception(r_const);
555
    tcg_temp_free_i32(r_const);
556
    gen_set_label(l1);
557
}
558

    
559
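// SDIVX: trap on division by zero and special-case INT64_MIN / -1, whose
// quotient cannot be represented; it yields INT64_MIN instead.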
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
560
{
561
    int l1, l2;
562

    
563
    l1 = gen_new_label();
564
    l2 = gen_new_label();
565
    tcg_gen_mov_tl(cpu_cc_src, src1);
566
    tcg_gen_mov_tl(cpu_cc_src2, src2);
567
    gen_trap_ifdivzero_tl(cpu_cc_src2);
568
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
569
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
570
    tcg_gen_movi_i64(dst, INT64_MIN);
571
    tcg_gen_br(l2);
572
    gen_set_label(l1);
573
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
574
    gen_set_label(l2);
575
}
576
#endif
577

    
578
// 1
579
static inline void gen_op_eval_ba(TCGv dst)
580
{
581
    tcg_gen_movi_tl(dst, 1);
582
}
583

    
584
// Z
585
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
586
{
587
    gen_mov_reg_Z(dst, src);
588
}
589

    
590
// Z | (N ^ V)
591
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
592
{
593
    gen_mov_reg_N(cpu_tmp0, src);
594
    gen_mov_reg_V(dst, src);
595
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
596
    gen_mov_reg_Z(cpu_tmp0, src);
597
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
598
}
599

    
600
// N ^ V
601
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
602
{
603
    gen_mov_reg_V(cpu_tmp0, src);
604
    gen_mov_reg_N(dst, src);
605
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
606
}
607

    
608
// C | Z
609
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
610
{
611
    gen_mov_reg_Z(cpu_tmp0, src);
612
    gen_mov_reg_C(dst, src);
613
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
614
}
615

    
616
// C
617
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
618
{
619
    gen_mov_reg_C(dst, src);
620
}
621

    
622
// V
623
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
624
{
625
    gen_mov_reg_V(dst, src);
626
}
627

    
628
// 0
629
static inline void gen_op_eval_bn(TCGv dst)
630
{
631
    tcg_gen_movi_tl(dst, 0);
632
}
633

    
634
// N
635
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
636
{
637
    gen_mov_reg_N(dst, src);
638
}
639

    
640
// !Z
641
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
642
{
643
    gen_mov_reg_Z(dst, src);
644
    tcg_gen_xori_tl(dst, dst, 0x1);
645
}
646

    
647
// !(Z | (N ^ V))
648
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
649
{
650
    gen_mov_reg_N(cpu_tmp0, src);
651
    gen_mov_reg_V(dst, src);
652
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
653
    gen_mov_reg_Z(cpu_tmp0, src);
654
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
655
    tcg_gen_xori_tl(dst, dst, 0x1);
656
}
657

    
658
// !(N ^ V)
659
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
660
{
661
    gen_mov_reg_V(cpu_tmp0, src);
662
    gen_mov_reg_N(dst, src);
663
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
664
    tcg_gen_xori_tl(dst, dst, 0x1);
665
}
666

    
667
// !(C | Z)
668
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
669
{
670
    gen_mov_reg_Z(cpu_tmp0, src);
671
    gen_mov_reg_C(dst, src);
672
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
673
    tcg_gen_xori_tl(dst, dst, 0x1);
674
}
675

    
676
// !C
677
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
678
{
679
    gen_mov_reg_C(dst, src);
680
    tcg_gen_xori_tl(dst, dst, 0x1);
681
}
682

    
683
// !N
684
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
685
{
686
    gen_mov_reg_N(dst, src);
687
    tcg_gen_xori_tl(dst, dst, 0x1);
688
}
689

    
690
// !V
691
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
692
{
693
    gen_mov_reg_V(dst, src);
694
    tcg_gen_xori_tl(dst, dst, 0x1);
695
}
696

    
697
/*
698
  FPSR bit field FCC1 | FCC0:
699
   0 =
700
   1 <
701
   2 >
702
   3 unordered
703
*/
704
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
705
                                    unsigned int fcc_offset)
706
{
707
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
708
    tcg_gen_andi_tl(reg, reg, 0x1);
709
}
710

    
711
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
712
                                    unsigned int fcc_offset)
713
{
714
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
715
    tcg_gen_andi_tl(reg, reg, 0x1);
716
}
717

    
718
// !0: FCC0 | FCC1
719
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
720
                                    unsigned int fcc_offset)
721
{
722
    gen_mov_reg_FCC0(dst, src, fcc_offset);
723
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
724
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
725
}
726

    
727
// 1 or 2: FCC0 ^ FCC1
728
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
729
                                    unsigned int fcc_offset)
730
{
731
    gen_mov_reg_FCC0(dst, src, fcc_offset);
732
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
733
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
734
}
735

    
736
// 1 or 3: FCC0
737
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
738
                                    unsigned int fcc_offset)
739
{
740
    gen_mov_reg_FCC0(dst, src, fcc_offset);
741
}
742

    
743
// 1: FCC0 & !FCC1
744
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
745
                                    unsigned int fcc_offset)
746
{
747
    gen_mov_reg_FCC0(dst, src, fcc_offset);
748
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
749
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
750
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
751
}
752

    
753
// 2 or 3: FCC1
754
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
755
                                    unsigned int fcc_offset)
756
{
757
    gen_mov_reg_FCC1(dst, src, fcc_offset);
758
}
759

    
760
// 2: !FCC0 & FCC1
761
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
762
                                    unsigned int fcc_offset)
763
{
764
    gen_mov_reg_FCC0(dst, src, fcc_offset);
765
    tcg_gen_xori_tl(dst, dst, 0x1);
766
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
767
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
768
}
769

    
770
// 3: FCC0 & FCC1
771
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
772
                                    unsigned int fcc_offset)
773
{
774
    gen_mov_reg_FCC0(dst, src, fcc_offset);
775
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
776
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
777
}
778

    
779
// 0: !(FCC0 | FCC1)
780
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
781
                                    unsigned int fcc_offset)
782
{
783
    gen_mov_reg_FCC0(dst, src, fcc_offset);
784
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
785
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
786
    tcg_gen_xori_tl(dst, dst, 0x1);
787
}
788

    
789
// 0 or 3: !(FCC0 ^ FCC1)
790
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
791
                                    unsigned int fcc_offset)
792
{
793
    gen_mov_reg_FCC0(dst, src, fcc_offset);
794
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
795
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
796
    tcg_gen_xori_tl(dst, dst, 0x1);
797
}
798

    
799
// 0 or 2: !FCC0
800
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
801
                                    unsigned int fcc_offset)
802
{
803
    gen_mov_reg_FCC0(dst, src, fcc_offset);
804
    tcg_gen_xori_tl(dst, dst, 0x1);
805
}
806

    
807
// !1: !(FCC0 & !FCC1)
808
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
809
                                    unsigned int fcc_offset)
810
{
811
    gen_mov_reg_FCC0(dst, src, fcc_offset);
812
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
813
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
814
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
815
    tcg_gen_xori_tl(dst, dst, 0x1);
816
}
817

    
818
// 0 or 1: !FCC1
819
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
820
                                    unsigned int fcc_offset)
821
{
822
    gen_mov_reg_FCC1(dst, src, fcc_offset);
823
    tcg_gen_xori_tl(dst, dst, 0x1);
824
}
825

    
826
// !2: !(!FCC0 & FCC1)
827
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
828
                                    unsigned int fcc_offset)
829
{
830
    gen_mov_reg_FCC0(dst, src, fcc_offset);
831
    tcg_gen_xori_tl(dst, dst, 0x1);
832
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
833
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
834
    tcg_gen_xori_tl(dst, dst, 0x1);
835
}
836

    
837
// !3: !(FCC0 & FCC1)
838
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
839
                                    unsigned int fcc_offset)
840
{
841
    gen_mov_reg_FCC0(dst, src, fcc_offset);
842
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
843
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
844
    tcg_gen_xori_tl(dst, dst, 0x1);
845
}
846

    
847
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
848
                               target_ulong pc2, TCGv r_cond)
849
{
850
    int l1;
851

    
852
    l1 = gen_new_label();
853

    
854
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
855

    
856
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
857

    
858
    gen_set_label(l1);
859
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
860
}
861

    
862
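// Branch with the annul bit set: if the condition holds, execute the delay
// slot (pc2) and then the target (pc1); otherwise the delay slot is
// annulled and execution resumes at pc2 + 4.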
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
863
                                target_ulong pc2, TCGv r_cond)
864
{
865
    int l1;
866

    
867
    l1 = gen_new_label();
868

    
869
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
870

    
871
    gen_goto_tb(dc, 0, pc2, pc1);
872

    
873
    gen_set_label(l1);
874
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
875
}
876

    
877
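// Materialize a JUMP_PC npc: select npc1 (branch taken) or npc2 (fall
// through) according to the previously computed condition value.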
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
878
                                      TCGv r_cond)
879
{
880
    int l1, l2;
881

    
882
    l1 = gen_new_label();
883
    l2 = gen_new_label();
884

    
885
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
886

    
887
    tcg_gen_movi_tl(cpu_npc, npc1);
888
    tcg_gen_br(l2);
889

    
890
    gen_set_label(l1);
891
    tcg_gen_movi_tl(cpu_npc, npc2);
892
    gen_set_label(l2);
893
}
894

    
895
/* call this function before using the condition register as it may
896
   have been set for a jump */
897
static inline void flush_cond(DisasContext *dc, TCGv cond)
898
{
899
    if (dc->npc == JUMP_PC) {
900
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
901
        dc->npc = DYNAMIC_PC;
902
    }
903
}
904

    
905
static inline void save_npc(DisasContext *dc, TCGv cond)
906
{
907
    if (dc->npc == JUMP_PC) {
908
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
909
        dc->npc = DYNAMIC_PC;
910
    } else if (dc->npc != DYNAMIC_PC) {
911
        tcg_gen_movi_tl(cpu_npc, dc->npc);
912
    }
913
}
914

    
915
static inline void save_state(DisasContext *dc, TCGv cond)
916
{
917
    tcg_gen_movi_tl(cpu_pc, dc->pc);
918
    /* flush pending conditional evaluations before exposing cpu state */
919
    if (dc->cc_op != CC_OP_FLAGS) {
920
        dc->cc_op = CC_OP_FLAGS;
921
        gen_helper_compute_psr();
922
    }
923
    save_npc(dc, cond);
924
}
925

    
926
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
927
{
928
    if (dc->npc == JUMP_PC) {
929
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
930
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
931
        dc->pc = DYNAMIC_PC;
932
    } else if (dc->npc == DYNAMIC_PC) {
933
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
934
        dc->pc = DYNAMIC_PC;
935
    } else {
936
        dc->pc = dc->npc;
937
    }
938
}
939

    
940
static inline void gen_op_next_insn(void)
941
{
942
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
943
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
944
}
945

    
946
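// Evaluate an integer branch condition into r_dst (0 or 1).  Condition
// codes are tracked lazily: if a pending CC_OP other than CC_OP_FLAGS is
// recorded, the real PSR is computed first.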
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
947
                            DisasContext *dc)
948
{
949
    TCGv_i32 r_src;
950

    
951
#ifdef TARGET_SPARC64
952
    if (cc)
953
        r_src = cpu_xcc;
954
    else
955
        r_src = cpu_psr;
956
#else
957
    r_src = cpu_psr;
958
#endif
959
    switch (dc->cc_op) {
960
    case CC_OP_FLAGS:
961
        break;
962
    default:
963
        gen_helper_compute_psr();
964
        dc->cc_op = CC_OP_FLAGS;
965
        break;
966
    }
967
    switch (cond) {
968
    case 0x0:
969
        gen_op_eval_bn(r_dst);
970
        break;
971
    case 0x1:
972
        gen_op_eval_be(r_dst, r_src);
973
        break;
974
    case 0x2:
975
        gen_op_eval_ble(r_dst, r_src);
976
        break;
977
    case 0x3:
978
        gen_op_eval_bl(r_dst, r_src);
979
        break;
980
    case 0x4:
981
        gen_op_eval_bleu(r_dst, r_src);
982
        break;
983
    case 0x5:
984
        gen_op_eval_bcs(r_dst, r_src);
985
        break;
986
    case 0x6:
987
        gen_op_eval_bneg(r_dst, r_src);
988
        break;
989
    case 0x7:
990
        gen_op_eval_bvs(r_dst, r_src);
991
        break;
992
    case 0x8:
993
        gen_op_eval_ba(r_dst);
994
        break;
995
    case 0x9:
996
        gen_op_eval_bne(r_dst, r_src);
997
        break;
998
    case 0xa:
999
        gen_op_eval_bg(r_dst, r_src);
1000
        break;
1001
    case 0xb:
1002
        gen_op_eval_bge(r_dst, r_src);
1003
        break;
1004
    case 0xc:
1005
        gen_op_eval_bgu(r_dst, r_src);
1006
        break;
1007
    case 0xd:
1008
        gen_op_eval_bcc(r_dst, r_src);
1009
        break;
1010
    case 0xe:
1011
        gen_op_eval_bpos(r_dst, r_src);
1012
        break;
1013
    case 0xf:
1014
        gen_op_eval_bvc(r_dst, r_src);
1015
        break;
1016
    }
1017
}
1018

    
1019
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1020
{
1021
    unsigned int offset;
1022

    
1023
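    /* offset selects the fcc field of the FSR to test: fcc0 sits at
       bits 11:10, fcc1..fcc3 at bits 33:32, 35:34 and 37:36, hence the
       "32 - 10" style constants relative to FSR_FCC0_SHIFT. */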
    switch (cc) {
1024
    default:
1025
    case 0x0:
1026
        offset = 0;
1027
        break;
1028
    case 0x1:
1029
        offset = 32 - 10;
1030
        break;
1031
    case 0x2:
1032
        offset = 34 - 10;
1033
        break;
1034
    case 0x3:
1035
        offset = 36 - 10;
1036
        break;
1037
    }
1038

    
1039
    switch (cond) {
1040
    case 0x0:
1041
        gen_op_eval_bn(r_dst);
1042
        break;
1043
    case 0x1:
1044
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1045
        break;
1046
    case 0x2:
1047
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1048
        break;
1049
    case 0x3:
1050
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1051
        break;
1052
    case 0x4:
1053
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1054
        break;
1055
    case 0x5:
1056
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1057
        break;
1058
    case 0x6:
1059
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1060
        break;
1061
    case 0x7:
1062
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1063
        break;
1064
    case 0x8:
1065
        gen_op_eval_ba(r_dst);
1066
        break;
1067
    case 0x9:
1068
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1069
        break;
1070
    case 0xa:
1071
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1072
        break;
1073
    case 0xb:
1074
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1075
        break;
1076
    case 0xc:
1077
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1078
        break;
1079
    case 0xd:
1080
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1081
        break;
1082
    case 0xe:
1083
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1084
        break;
1085
    case 0xf:
1086
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1087
        break;
1088
    }
1089
}
1090

    
1091
#ifdef TARGET_SPARC64
1092
// Inverted logic: each entry is the negation of the architectural BPr condition
1093
static const int gen_tcg_cond_reg[8] = {
1094
    -1,
1095
    TCG_COND_NE,
1096
    TCG_COND_GT,
1097
    TCG_COND_GE,
1098
    -1,
1099
    TCG_COND_EQ,
1100
    TCG_COND_LE,
1101
    TCG_COND_LT,
1102
};
1103

    
1104
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1105
{
1106
    int l1;
1107

    
1108
    l1 = gen_new_label();
1109
    tcg_gen_movi_tl(r_dst, 0);
1110
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1111
    tcg_gen_movi_tl(r_dst, 1);
1112
    gen_set_label(l1);
1113
}
1114
#endif
1115

    
1116
/* XXX: potentially incorrect if dynamic npc */
1117
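/* The annul bit on an unconditional branch: BN with a=1 also skips the
   delay slot, and BA with a=1 transfers directly to the target without
   executing the delay slot. */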
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1118
                      TCGv r_cond)
1119
{
1120
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1121
    target_ulong target = dc->pc + offset;
1122

    
1123
    if (cond == 0x0) {
1124
        /* unconditional not taken */
1125
        if (a) {
1126
            dc->pc = dc->npc + 4;
1127
            dc->npc = dc->pc + 4;
1128
        } else {
1129
            dc->pc = dc->npc;
1130
            dc->npc = dc->pc + 4;
1131
        }
1132
    } else if (cond == 0x8) {
1133
        /* unconditional taken */
1134
        if (a) {
1135
            dc->pc = target;
1136
            dc->npc = dc->pc + 4;
1137
        } else {
1138
            dc->pc = dc->npc;
1139
            dc->npc = target;
1140
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1141
        }
1142
    } else {
1143
        flush_cond(dc, r_cond);
1144
        gen_cond(r_cond, cc, cond, dc);
1145
        if (a) {
1146
            gen_branch_a(dc, target, dc->npc, r_cond);
1147
            dc->is_br = 1;
1148
        } else {
1149
            dc->pc = dc->npc;
1150
            dc->jump_pc[0] = target;
1151
            dc->jump_pc[1] = dc->npc + 4;
1152
            dc->npc = JUMP_PC;
1153
        }
1154
    }
1155
}
1156

    
1157
/* XXX: potentially incorrect if dynamic npc */
1158
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1159
                      TCGv r_cond)
1160
{
1161
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1162
    target_ulong target = dc->pc + offset;
1163

    
1164
    if (cond == 0x0) {
1165
        /* unconditional not taken */
1166
        if (a) {
1167
            dc->pc = dc->npc + 4;
1168
            dc->npc = dc->pc + 4;
1169
        } else {
1170
            dc->pc = dc->npc;
1171
            dc->npc = dc->pc + 4;
1172
        }
1173
    } else if (cond == 0x8) {
1174
        /* unconditional taken */
1175
        if (a) {
1176
            dc->pc = target;
1177
            dc->npc = dc->pc + 4;
1178
        } else {
1179
            dc->pc = dc->npc;
1180
            dc->npc = target;
1181
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1182
        }
1183
    } else {
1184
        flush_cond(dc, r_cond);
1185
        gen_fcond(r_cond, cc, cond);
1186
        if (a) {
1187
            gen_branch_a(dc, target, dc->npc, r_cond);
1188
            dc->is_br = 1;
1189
        } else {
1190
            dc->pc = dc->npc;
1191
            dc->jump_pc[0] = target;
1192
            dc->jump_pc[1] = dc->npc + 4;
1193
            dc->npc = JUMP_PC;
1194
        }
1195
    }
1196
}
1197

    
1198
#ifdef TARGET_SPARC64
1199
/* XXX: potentially incorrect if dynamic npc */
1200
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1201
                          TCGv r_cond, TCGv r_reg)
1202
{
1203
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1204
    target_ulong target = dc->pc + offset;
1205

    
1206
    flush_cond(dc, r_cond);
1207
    gen_cond_reg(r_cond, cond, r_reg);
1208
    if (a) {
1209
        gen_branch_a(dc, target, dc->npc, r_cond);
1210
        dc->is_br = 1;
1211
    } else {
1212
        dc->pc = dc->npc;
1213
        dc->jump_pc[0] = target;
1214
        dc->jump_pc[1] = dc->npc + 4;
1215
        dc->npc = JUMP_PC;
1216
    }
1217
}
1218

    
1219
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1220
{
1221
    switch (fccno) {
1222
    case 0:
1223
        gen_helper_fcmps(r_rs1, r_rs2);
1224
        break;
1225
    case 1:
1226
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1227
        break;
1228
    case 2:
1229
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1230
        break;
1231
    case 3:
1232
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1233
        break;
1234
    }
1235
}
1236

    
1237
static inline void gen_op_fcmpd(int fccno)
1238
{
1239
    switch (fccno) {
1240
    case 0:
1241
        gen_helper_fcmpd();
1242
        break;
1243
    case 1:
1244
        gen_helper_fcmpd_fcc1();
1245
        break;
1246
    case 2:
1247
        gen_helper_fcmpd_fcc2();
1248
        break;
1249
    case 3:
1250
        gen_helper_fcmpd_fcc3();
1251
        break;
1252
    }
1253
}
1254

    
1255
static inline void gen_op_fcmpq(int fccno)
1256
{
1257
    switch (fccno) {
1258
    case 0:
1259
        gen_helper_fcmpq();
1260
        break;
1261
    case 1:
1262
        gen_helper_fcmpq_fcc1();
1263
        break;
1264
    case 2:
1265
        gen_helper_fcmpq_fcc2();
1266
        break;
1267
    case 3:
1268
        gen_helper_fcmpq_fcc3();
1269
        break;
1270
    }
1271
}
1272

    
1273
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1274
{
1275
    switch (fccno) {
1276
    case 0:
1277
        gen_helper_fcmpes(r_rs1, r_rs2);
1278
        break;
1279
    case 1:
1280
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1281
        break;
1282
    case 2:
1283
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1284
        break;
1285
    case 3:
1286
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1287
        break;
1288
    }
1289
}
1290

    
1291
static inline void gen_op_fcmped(int fccno)
1292
{
1293
    switch (fccno) {
1294
    case 0:
1295
        gen_helper_fcmped();
1296
        break;
1297
    case 1:
1298
        gen_helper_fcmped_fcc1();
1299
        break;
1300
    case 2:
1301
        gen_helper_fcmped_fcc2();
1302
        break;
1303
    case 3:
1304
        gen_helper_fcmped_fcc3();
1305
        break;
1306
    }
1307
}
1308

    
1309
static inline void gen_op_fcmpeq(int fccno)
1310
{
1311
    switch (fccno) {
1312
    case 0:
1313
        gen_helper_fcmpeq();
1314
        break;
1315
    case 1:
1316
        gen_helper_fcmpeq_fcc1();
1317
        break;
1318
    case 2:
1319
        gen_helper_fcmpeq_fcc2();
1320
        break;
1321
    case 3:
1322
        gen_helper_fcmpeq_fcc3();
1323
        break;
1324
    }
1325
}
1326

    
1327
#else
1328

    
1329
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1330
{
1331
    gen_helper_fcmps(r_rs1, r_rs2);
1332
}
1333

    
1334
static inline void gen_op_fcmpd(int fccno)
1335
{
1336
    gen_helper_fcmpd();
1337
}
1338

    
1339
static inline void gen_op_fcmpq(int fccno)
1340
{
1341
    gen_helper_fcmpq();
1342
}
1343

    
1344
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1345
{
1346
    gen_helper_fcmpes(r_rs1, r_rs2);
1347
}
1348

    
1349
static inline void gen_op_fcmped(int fccno)
1350
{
1351
    gen_helper_fcmped();
1352
}
1353

    
1354
static inline void gen_op_fcmpeq(int fccno)
1355
{
1356
    gen_helper_fcmpeq();
1357
}
1358
#endif
1359

    
1360
static inline void gen_op_fpexception_im(int fsr_flags)
1361
{
1362
    TCGv_i32 r_const;
1363

    
1364
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1365
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1366
    r_const = tcg_const_i32(TT_FP_EXCP);
1367
    gen_helper_raise_exception(r_const);
1368
    tcg_temp_free_i32(r_const);
1369
}
1370

    
1371
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1372
{
1373
#if !defined(CONFIG_USER_ONLY)
1374
    if (!dc->fpu_enabled) {
1375
        TCGv_i32 r_const;
1376

    
1377
        save_state(dc, r_cond);
1378
        r_const = tcg_const_i32(TT_NFPU_INSN);
1379
        gen_helper_raise_exception(r_const);
1380
        tcg_temp_free_i32(r_const);
1381
        dc->is_br = 1;
1382
        return 1;
1383
    }
1384
#endif
1385
    return 0;
1386
}
1387

    
1388
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1389
{
1390
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1391
}
1392

    
1393
static inline void gen_clear_float_exceptions(void)
1394
{
1395
    gen_helper_clear_float_exceptions();
1396
}
1397

    
1398
/* asi moves */
1399
#ifdef TARGET_SPARC64
1400
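/* With the i bit set the ASI comes from the %asi register; otherwise it is
   taken from the 8-bit immediate ASI field of the instruction. */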
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1401
{
1402
    int asi;
1403
    TCGv_i32 r_asi;
1404

    
1405
    if (IS_IMM) {
1406
        r_asi = tcg_temp_new_i32();
1407
        tcg_gen_mov_i32(r_asi, cpu_asi);
1408
    } else {
1409
        asi = GET_FIELD(insn, 19, 26);
1410
        r_asi = tcg_const_i32(asi);
1411
    }
1412
    return r_asi;
1413
}
1414

    
1415
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1416
                              int sign)
1417
{
1418
    TCGv_i32 r_asi, r_size, r_sign;
1419

    
1420
    r_asi = gen_get_asi(insn, addr);
1421
    r_size = tcg_const_i32(size);
1422
    r_sign = tcg_const_i32(sign);
1423
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1424
    tcg_temp_free_i32(r_sign);
1425
    tcg_temp_free_i32(r_size);
1426
    tcg_temp_free_i32(r_asi);
1427
}
1428

    
1429
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1430
{
1431
    TCGv_i32 r_asi, r_size;
1432

    
1433
    r_asi = gen_get_asi(insn, addr);
1434
    r_size = tcg_const_i32(size);
1435
    gen_helper_st_asi(addr, src, r_asi, r_size);
1436
    tcg_temp_free_i32(r_size);
1437
    tcg_temp_free_i32(r_asi);
1438
}
1439

    
1440
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1441
{
1442
    TCGv_i32 r_asi, r_size, r_rd;
1443

    
1444
    r_asi = gen_get_asi(insn, addr);
1445
    r_size = tcg_const_i32(size);
1446
    r_rd = tcg_const_i32(rd);
1447
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1448
    tcg_temp_free_i32(r_rd);
1449
    tcg_temp_free_i32(r_size);
1450
    tcg_temp_free_i32(r_asi);
1451
}
1452

    
1453
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1454
{
1455
    TCGv_i32 r_asi, r_size, r_rd;
1456

    
1457
    r_asi = gen_get_asi(insn, addr);
1458
    r_size = tcg_const_i32(size);
1459
    r_rd = tcg_const_i32(rd);
1460
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1461
    tcg_temp_free_i32(r_rd);
1462
    tcg_temp_free_i32(r_size);
1463
    tcg_temp_free_i32(r_asi);
1464
}
1465

    
1466
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1467
{
1468
    TCGv_i32 r_asi, r_size, r_sign;
1469

    
1470
    r_asi = gen_get_asi(insn, addr);
1471
    r_size = tcg_const_i32(4);
1472
    r_sign = tcg_const_i32(0);
1473
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1474
    tcg_temp_free_i32(r_sign);
1475
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1476
    tcg_temp_free_i32(r_size);
1477
    tcg_temp_free_i32(r_asi);
1478
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1479
}
1480

    
1481
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1482
{
1483
    TCGv_i32 r_asi, r_rd;
1484

    
1485
    r_asi = gen_get_asi(insn, addr);
1486
    r_rd = tcg_const_i32(rd);
1487
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1488
    tcg_temp_free_i32(r_rd);
1489
    tcg_temp_free_i32(r_asi);
1490
}
1491

    
1492
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1493
{
1494
    TCGv_i32 r_asi, r_size;
1495

    
1496
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1497
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1498
    r_asi = gen_get_asi(insn, addr);
1499
    r_size = tcg_const_i32(8);
1500
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1501
    tcg_temp_free_i32(r_size);
1502
    tcg_temp_free_i32(r_asi);
1503
}
1504

    
1505
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1506
                               int rd)
1507
{
1508
    TCGv r_val1;
1509
    TCGv_i32 r_asi;
1510

    
1511
    r_val1 = tcg_temp_new();
1512
    gen_movl_reg_TN(rd, r_val1);
1513
    r_asi = gen_get_asi(insn, addr);
1514
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1515
    tcg_temp_free_i32(r_asi);
1516
    tcg_temp_free(r_val1);
1517
}
1518

    
1519
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1520
                                int rd)
1521
{
1522
    TCGv_i32 r_asi;
1523

    
1524
    gen_movl_reg_TN(rd, cpu_tmp64);
1525
    r_asi = gen_get_asi(insn, addr);
1526
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1527
    tcg_temp_free_i32(r_asi);
1528
}
1529

    
1530
#elif !defined(CONFIG_USER_ONLY)
1531

    
1532
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1533
                              int sign)
1534
{
1535
    TCGv_i32 r_asi, r_size, r_sign;
1536

    
1537
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1538
    r_size = tcg_const_i32(size);
1539
    r_sign = tcg_const_i32(sign);
1540
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1541
    tcg_temp_free(r_sign);
1542
    tcg_temp_free(r_size);
1543
    tcg_temp_free(r_asi);
1544
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1545
}
1546

    
1547
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1548
{
1549
    TCGv_i32 r_asi, r_size;
1550

    
1551
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1552
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1553
    r_size = tcg_const_i32(size);
1554
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1555
    tcg_temp_free(r_size);
1556
    tcg_temp_free(r_asi);
1557
}
1558

    
1559
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1560
{
1561
    TCGv_i32 r_asi, r_size, r_sign;
1562
    TCGv_i64 r_val;
1563

    
1564
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1565
    r_size = tcg_const_i32(4);
1566
    r_sign = tcg_const_i32(0);
1567
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1568
    tcg_temp_free(r_sign);
1569
    r_val = tcg_temp_new_i64();
1570
    tcg_gen_extu_tl_i64(r_val, dst);
1571
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1572
    tcg_temp_free_i64(r_val);
1573
    tcg_temp_free(r_size);
1574
    tcg_temp_free(r_asi);
1575
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1576
}
1577

    
1578
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1579
{
1580
    TCGv_i32 r_asi, r_size, r_sign;
1581

    
1582
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1583
    r_size = tcg_const_i32(8);
1584
    r_sign = tcg_const_i32(0);
1585
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1586
    tcg_temp_free(r_sign);
1587
    tcg_temp_free(r_size);
1588
    tcg_temp_free(r_asi);
1589
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1590
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1591
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1592
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1593
    gen_movl_TN_reg(rd, hi);
1594
}
1595

    
1596
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1597
{
1598
    TCGv_i32 r_asi, r_size;
1599

    
1600
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1601
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1602
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1603
    r_size = tcg_const_i32(8);
1604
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1605
    tcg_temp_free(r_size);
1606
    tcg_temp_free(r_asi);
1607
}
1608
#endif
1609

    
1610
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1611
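/* LDSTUB: load the addressed byte, then store 0xff to the same location
   (modelled here as separate ASI load and store helper calls). */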
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1612
{
1613
    TCGv_i64 r_val;
1614
    TCGv_i32 r_asi, r_size;
1615

    
1616
    gen_ld_asi(dst, addr, insn, 1, 0);
1617

    
1618
    r_val = tcg_const_i64(0xffULL);
1619
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1620
    r_size = tcg_const_i32(1);
1621
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1622
    tcg_temp_free_i32(r_size);
1623
    tcg_temp_free_i32(r_asi);
1624
    tcg_temp_free_i64(r_val);
1625
}
1626
#endif
1627

    
1628
static inline TCGv get_src1(unsigned int insn, TCGv def)
1629
{
1630
    TCGv r_rs1 = def;
1631
    unsigned int rs1;
1632

    
1633
    rs1 = GET_FIELD(insn, 13, 17);
1634
    if (rs1 == 0)
1635
        r_rs1 = tcg_const_tl(0); // XXX how to free?
1636
    else if (rs1 < 8)
1637
        r_rs1 = cpu_gregs[rs1];
1638
    else
1639
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1640
    return r_rs1;
1641
}
1642

    
1643
static inline TCGv get_src2(unsigned int insn, TCGv def)
1644
{
1645
    TCGv r_rs2 = def;
1646

    
1647
    if (IS_IMM) { /* immediate */
1648
        target_long simm;
1649

    
1650
        simm = GET_FIELDs(insn, 19, 31);
1651
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
1652
    } else { /* register */
1653
        unsigned int rs2;
1654

    
1655
        rs2 = GET_FIELD(insn, 27, 31);
1656
        if (rs2 == 0)
1657
            r_rs2 = tcg_const_tl(0); // XXX how to free?
1658
        else if (rs2 < 8)
1659
            r_rs2 = cpu_gregs[rs2];
1660
        else
1661
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1662
    }
1663
    return r_rs2;
1664
}
1665

    
1666
#ifdef TARGET_SPARC64
1667
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1668
{
1669
    TCGv_i32 r_tl = tcg_temp_new_i32();
1670

    
1671
    /* load env->tl into r_tl */
1672
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1673

    
1674
    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1675
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1676

    
1677
    /* calculate offset to current trap state from env->ts, reuse r_tl */
1678
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1679
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1680

    
1681
    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1682
    {
1683
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1684
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1685
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1686
        tcg_temp_free_ptr(r_tl_tmp);
1687
    }
1688

    
1689
    tcg_temp_free_i32(r_tl);
1690
}
1691
#endif
1692

    
1693
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
1694
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1695
        goto illegal_insn;
1696
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1697
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1698
        goto nfpu_insn;
1699

    
1700
/* before an instruction, dc->pc must be static */
1701
static void disas_sparc_insn(DisasContext * dc)
1702
{
1703
    unsigned int insn, opc, rs1, rs2, rd;
1704
    target_long simm;
1705

    
1706
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1707
        tcg_gen_debug_insn_start(dc->pc);
1708
    insn = ldl_code(dc->pc);
1709
    opc = GET_FIELD(insn, 0, 1);
1710

    
1711
    rd = GET_FIELD(insn, 2, 6);
1712

    
1713
    cpu_src1 = tcg_temp_new(); // const
1714
    cpu_src2 = tcg_temp_new(); // const
1715

    
1716
    switch (opc) {
1717
    case 0:                     /* branches/sethi */
1718
        {
1719
            unsigned int xop = GET_FIELD(insn, 7, 9);
1720
            int32_t target;
1721
            switch (xop) {
1722
#ifdef TARGET_SPARC64
1723
            case 0x1:           /* V9 BPcc */
1724
                {
1725
                    int cc;
1726

    
1727
                    target = GET_FIELD_SP(insn, 0, 18);
1728
                    target = sign_extend(target, 19);
1729
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
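            /* Tcc (xop 0x3a): the software trap number is rs1 plus
               either rs2 or a 7-bit immediate, masked with
               UA2005_HTRAP_MASK (hypervisor feature, supervisor mode)
               or V8_TRAP_MASK, and raised as TT_TRAP + number;
               conditional forms branch over the raise when the tested
               icc/xcc condition is false. */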
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
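                /* FPop1: single-precision arithmetic results are
                   computed into cpu_tmp32 and only copied to the
                   destination register after the IEEE exception check;
                   double and quad operands travel through the DT0/DT1
                   and QT0/QT1 staging areas in the CPU state. */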
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
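/* FMOV[SDQ]CC: conditionally move one, two or four single-precision
   registers when the selected condition holds; this first set of
   definitions tests a %fcc field via gen_fcond, while the
   redefinitions further below test %icc/%xcc via gen_cond.  The
   moves are skipped by branching over them when the condition
   evaluates to zero. */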
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
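                /* Bit 12 of the instruction selects the 64-bit shift
                   (sllx/srlx/srax, 6-bit count) versus the V8 form
                   (5-bit count); for srl/sra the V8 form first
                   truncates the source to its low 32 bits
                   (sign-extending it for sra). */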
2759
                cpu_src1 = get_src1(insn, cpu_src1);
2760
                if (IS_IMM) {   /* immediate */
2761
                    simm = GET_FIELDs(insn, 20, 31);
2762
                    if (insn & (1 << 12)) {
2763
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2764
                    } else {
2765
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2766
                    }
2767
                } else {                /* register */
2768
                    rs2 = GET_FIELD(insn, 27, 31);
2769
                    gen_movl_reg_TN(rs2, cpu_src2);
2770
                    if (insn & (1 << 12)) {
2771
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2772
                    } else {
2773
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2774
                    }
2775
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2776
                }
2777
                gen_movl_TN_reg(rd, cpu_dst);
2778
            } else if (xop == 0x26) { /* srl, V9 srlx */
2779
                cpu_src1 = get_src1(insn, cpu_src1);
2780
                if (IS_IMM) {   /* immediate */
2781
                    simm = GET_FIELDs(insn, 20, 31);
2782
                    if (insn & (1 << 12)) {
2783
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2784
                    } else {
2785
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2786
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2787
                    }
2788
                } else {                /* register */
2789
                    rs2 = GET_FIELD(insn, 27, 31);
2790
                    gen_movl_reg_TN(rs2, cpu_src2);
2791
                    if (insn & (1 << 12)) {
2792
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2793
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2794
                    } else {
2795
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2796
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2797
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2798
                    }
2799
                }
2800
                gen_movl_TN_reg(rd, cpu_dst);
2801
            } else if (xop == 0x27) { /* sra, V9 srax */
2802
                cpu_src1 = get_src1(insn, cpu_src1);
2803
                if (IS_IMM) {   /* immediate */
2804
                    simm = GET_FIELDs(insn, 20, 31);
2805
                    if (insn & (1 << 12)) {
2806
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2807
                    } else {
2808
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2809
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2810
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2811
                    }
2812
                } else {                /* register */
2813
                    rs2 = GET_FIELD(insn, 27, 31);
2814
                    gen_movl_reg_TN(rs2, cpu_src2);
2815
                    if (insn & (1 << 12)) {
2816
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2817
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2818
                    } else {
2819
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2820
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2821
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2822
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2823
                    }
2824
                }
2825
                gen_movl_TN_reg(rd, cpu_dst);
2826
#endif
2827
            } else if (xop < 0x36) {
2828
                if (xop < 0x20) {
2829
                    cpu_src1 = get_src1(insn, cpu_src1);
2830
                    cpu_src2 = get_src2(insn, cpu_src2);
2831
                    switch (xop & ~0x10) {
2832
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                                dc->cc_op = CC_OP_ADDX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                                dc->cc_op = CC_OP_ADDX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                                dc->cc_op = CC_OP_SUBX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                                dc->cc_op = CC_OP_SUBX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                            dc->cc_op = CC_OP_DIV;
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                            dc->cc_op = CC_OP_DIV;
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
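                    /* xop 0x20-0x35: tagged arithmetic, mulscc, the pre-V9
                       shifts and the state register writes; each case does
                       its own write-back.  */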
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr();
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
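                    /* wry and V9 wrasr: rd selects the ancillary state
                       register; the value written is rs1 XOR the second
                       operand (register or simm13).  */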
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc))
                                    ; // XXX
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
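                    /* wrwim on 32-bit targets; the V9 wrpr form writes the
                       privileged register selected by rd.  Both are
                       supervisor-only.  */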
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(cpu_tmp0);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 7: // tl
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
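                    /* V9 movcc: bit 18 selects integer or FP condition
                       codes; when the condition is false, the move of the
                       second operand into rd is branched over.  */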
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                              cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
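                /* opf selects the VIS operation.  VIS I/II ops that are not
                   implemented here (edge*, bmask, alignaddrl, fpack*, pdist,
                   bshuffle) are rejected as illegal instructions.  */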
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
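                /* cpu_dst = rs1 + simm13 (or rs1 + rs2): the jump target for
                   jmpl/rett, the address for flush, and the value written to
                   rd by save/restore below. */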
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
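            /* cpu_addr now holds the effective address: rs1 for casa/casxa
               (rs2 is the comparison value), rs1 + simm13 for the immediate
               form, or rs1 + rs2 otherwise. */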
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomic */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory, atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomic */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory, atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}

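/* Translate a block of SPARC instructions starting at tb->pc into TCG ops.
   When spc is non-zero the loop also records per-instruction pc/npc values
   in the gen_opc_* arrays so that gen_pc_load() can restore them later. */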
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
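    /* Translate one instruction per iteration until a branch is emitted,
       the next PC is not sequential, a page boundary is crossed, single
       stepping is requested, or the opcode buffer / max_insns limit is
       reached. */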
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

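    /* Release the per-translation temporaries and emit the TB epilogue. */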
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
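    /* If translation stopped without a branch, either chain directly to the
       next TB (static pc and npc) or store pc/npc and exit to the main
       loop. */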
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

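/* Allocate the TCG globals that mirror CPUState fields; this only needs to
   be done once, hence the 'inited' guard. */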
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

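/* Restore pc/npc for the instruction at pc_pos of a retranslated TB.  The
   stored npc is either a real address, DYNAMIC_PC (1, npc already up to
   date) or JUMP_PC (2, resolved through cond and gen_opc_jump_pc[]). */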
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}