target-sparc/translate.c @ 6ad6135d
1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "exec-all.h"
29
#include "disas.h"
30
#include "helper.h"
31
#include "tcg-op.h"
32

    
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
#define DEBUG_DISAS
37

    
38
#define DYNAMIC_PC  1 /* dynamic pc value */
39
#define JUMP_PC     2 /* dynamic pc value which takes only two values
40
                         according to jump_pc[T2] */
41

    
42
/* global register indexes */
43
static TCGv_ptr cpu_env, cpu_regwptr;
44
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45
static TCGv_i32 cpu_cc_op;
46
static TCGv_i32 cpu_psr;
47
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48
static TCGv cpu_y;
49
#ifndef CONFIG_USER_ONLY
50
static TCGv cpu_tbr;
51
#endif
52
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53
#ifdef TARGET_SPARC64
54
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55
static TCGv cpu_gsr;
56
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58
static TCGv_i32 cpu_softint;
59
#else
60
static TCGv cpu_wim;
61
#endif
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64
static TCGv_i32 cpu_tmp32;
65
static TCGv_i64 cpu_tmp64;
66
/* Floating point registers */
67
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68

    
69
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70
static target_ulong gen_opc_jump_pc[2];
71

    
72
#include "gen-icount.h"
73

    
74
typedef struct DisasContext {
75
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
76
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78
    int is_br;
79
    int mem_idx;
80
    int fpu_enabled;
81
    int address_mask_32bit;
82
    uint32_t cc_op;  /* current CC operation */
83
    struct TranslationBlock *tb;
84
    sparc_def_t *def;
85
} DisasContext;
86

    
87
// This macro uses non-native bit order: bit 0 is the MSB (bit 31)
88
#define GET_FIELD(X, FROM, TO)                                  \
89
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90

    
91
// This macro uses the bit order of the SPARC manuals, i.e. bit 0 is 2^0
92
#define GET_FIELD_SP(X, FROM, TO)               \
93
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
94

    
95
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
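/* Example: GET_FIELD(insn, 0, 1) extracts the 2-bit op field from the
   top of the word (here "bit 0" means the MSB, bit 31), while
   GET_FIELD_SP(insn, 20, 21) extracts bits 21:20 in the architectural
   numbering used by the SPARC manuals. */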
97

    
98
#ifdef TARGET_SPARC64
99
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101
#else
102
#define DFPREG(r) (r & 0x1e)
103
#define QFPREG(r) (r & 0x1c)
104
#endif
105

    
106
#define UA2005_HTRAP_MASK 0xff
107
#define V8_TRAP_MASK 0x7f
108

    
109
static int sign_extend(int x, int len)
110
{
111
    len = 32 - len;
112
    return (x << len) >> len;
113
}
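/* e.g. sign_extend(0x2000, 14) == -0x2000: bit 13 is interpreted as
   the sign bit of a 14-bit field. */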
114

    
115
#define IS_IMM (insn & (1<<13))
116

    
117
/* floating point register moves */
118
static void gen_op_load_fpr_DT0(unsigned int src)
119
{
120
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
121
                   offsetof(CPU_DoubleU, l.upper));
122
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
123
                   offsetof(CPU_DoubleU, l.lower));
124
}
125

    
126
static void gen_op_load_fpr_DT1(unsigned int src)
127
{
128
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
129
                   offsetof(CPU_DoubleU, l.upper));
130
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
131
                   offsetof(CPU_DoubleU, l.lower));
132
}
133

    
134
static void gen_op_store_DT0_fpr(unsigned int dst)
135
{
136
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
137
                   offsetof(CPU_DoubleU, l.upper));
138
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
139
                   offsetof(CPU_DoubleU, l.lower));
140
}
141

    
142
static void gen_op_load_fpr_QT0(unsigned int src)
143
{
144
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
145
                   offsetof(CPU_QuadU, l.upmost));
146
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
147
                   offsetof(CPU_QuadU, l.upper));
148
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
149
                   offsetof(CPU_QuadU, l.lower));
150
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
151
                   offsetof(CPU_QuadU, l.lowest));
152
}
153

    
154
static void gen_op_load_fpr_QT1(unsigned int src)
155
{
156
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
157
                   offsetof(CPU_QuadU, l.upmost));
158
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
159
                   offsetof(CPU_QuadU, l.upper));
160
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
161
                   offsetof(CPU_QuadU, l.lower));
162
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
163
                   offsetof(CPU_QuadU, l.lowest));
164
}
165

    
166
static void gen_op_store_QT0_fpr(unsigned int dst)
167
{
168
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
169
                   offsetof(CPU_QuadU, l.upmost));
170
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
171
                   offsetof(CPU_QuadU, l.upper));
172
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
173
                   offsetof(CPU_QuadU, l.lower));
174
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
175
                   offsetof(CPU_QuadU, l.lowest));
176
}
177

    
178
/* moves */
179
#ifdef CONFIG_USER_ONLY
180
#define supervisor(dc) 0
181
#ifdef TARGET_SPARC64
182
#define hypervisor(dc) 0
183
#endif
184
#else
185
#define supervisor(dc) (dc->mem_idx >= 1)
186
#ifdef TARGET_SPARC64
187
#define hypervisor(dc) (dc->mem_idx == 2)
188
#else
189
#endif
190
#endif
191

    
192
#ifdef TARGET_SPARC64
193
#ifndef TARGET_ABI32
194
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
195
#else
196
#define AM_CHECK(dc) (1)
197
#endif
198
#endif
199

    
200
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
201
{
202
#ifdef TARGET_SPARC64
203
    if (AM_CHECK(dc))
204
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205
#endif
206
}
207

    
208
static inline void gen_movl_reg_TN(int reg, TCGv tn)
209
{
210
    if (reg == 0)
211
        tcg_gen_movi_tl(tn, 0);
212
    else if (reg < 8)
213
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
214
    else {
215
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216
    }
217
}
218

    
219
static inline void gen_movl_TN_reg(int reg, TCGv tn)
220
{
221
    if (reg == 0)
222
        return;
223
    else if (reg < 8)
224
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
225
    else {
226
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227
    }
228
}
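/* %g0-%g7 live in the cpu_gregs[] globals; the windowed %o, %l and %i
   registers are accessed indirectly through cpu_regwptr, which points
   into the current register window. */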
229

    
230
static inline void gen_goto_tb(DisasContext *s, int tb_num,
231
                               target_ulong pc, target_ulong npc)
232
{
233
    TranslationBlock *tb;
234

    
235
    tb = s->tb;
236
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
237
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
238
        /* jump to same page: we can use a direct jump */
239
        tcg_gen_goto_tb(tb_num);
240
        tcg_gen_movi_tl(cpu_pc, pc);
241
        tcg_gen_movi_tl(cpu_npc, npc);
242
        tcg_gen_exit_tb((long)tb + tb_num);
243
    } else {
244
        /* jump to another page: currently not optimized */
245
        tcg_gen_movi_tl(cpu_pc, pc);
246
        tcg_gen_movi_tl(cpu_npc, npc);
247
        tcg_gen_exit_tb(0);
248
    }
249
}
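/* If both pc and npc stay on the same page as the current TB, chain
   directly to the next translation block (goto_tb + exit_tb with the
   TB pointer); otherwise exit with 0 so the execution loop looks the
   target block up again. */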
250

    
251
// XXX suboptimal
252
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
253
{
254
    tcg_gen_extu_i32_tl(reg, src);
255
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
256
    tcg_gen_andi_tl(reg, reg, 0x1);
257
}
258

    
259
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
260
{
261
    tcg_gen_extu_i32_tl(reg, src);
262
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
263
    tcg_gen_andi_tl(reg, reg, 0x1);
264
}
265

    
266
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
267
{
268
    tcg_gen_extu_i32_tl(reg, src);
269
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
270
    tcg_gen_andi_tl(reg, reg, 0x1);
271
}
272

    
273
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
274
{
275
    tcg_gen_extu_i32_tl(reg, src);
276
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
277
    tcg_gen_andi_tl(reg, reg, 0x1);
278
}
279

    
280
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
281
{
282
    TCGv r_temp;
283
    TCGv_i32 r_const;
284
    int l1;
285

    
286
    l1 = gen_new_label();
287

    
288
    r_temp = tcg_temp_new();
289
    tcg_gen_xor_tl(r_temp, src1, src2);
290
    tcg_gen_not_tl(r_temp, r_temp);
291
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
292
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
293
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
294
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
295
    r_const = tcg_const_i32(TT_TOVF);
296
    gen_helper_raise_exception(r_const);
297
    tcg_temp_free_i32(r_const);
298
    gen_set_label(l1);
299
    tcg_temp_free(r_temp);
300
}
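/* Tagged-add overflow check: overflow occurred iff the operands have
   the same sign and the result's sign differs, i.e. bit 31 of
   (~(src1 ^ src2) & (src1 ^ dst)) is set; in that case raise TT_TOVF. */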
301

    
302
static inline void gen_tag_tv(TCGv src1, TCGv src2)
303
{
304
    int l1;
305
    TCGv_i32 r_const;
306

    
307
    l1 = gen_new_label();
308
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
309
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
310
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
311
    r_const = tcg_const_i32(TT_TOVF);
312
    gen_helper_raise_exception(r_const);
313
    tcg_temp_free_i32(r_const);
314
    gen_set_label(l1);
315
}
316

    
317
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
318
{
319
    tcg_gen_mov_tl(cpu_cc_src, src1);
320
    tcg_gen_movi_tl(cpu_cc_src2, src2);
321
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
322
    tcg_gen_mov_tl(dst, cpu_cc_dst);
323
}
324

    
325
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
326
{
327
    tcg_gen_mov_tl(cpu_cc_src, src1);
328
    tcg_gen_mov_tl(cpu_cc_src2, src2);
329
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
330
    tcg_gen_mov_tl(dst, cpu_cc_dst);
331
}
332

    
333
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
334
{
335
    tcg_gen_mov_tl(cpu_cc_src, src1);
336
    tcg_gen_movi_tl(cpu_cc_src2, src2);
337
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
338
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
339
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
340
    tcg_gen_mov_tl(dst, cpu_cc_dst);
341
}
342

    
343
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
344
{
345
    tcg_gen_mov_tl(cpu_cc_src, src1);
346
    tcg_gen_mov_tl(cpu_cc_src2, src2);
347
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
348
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
349
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
350
    tcg_gen_mov_tl(dst, cpu_cc_dst);
351
}
352

    
353
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
354
{
355
    tcg_gen_mov_tl(cpu_cc_src, src1);
356
    tcg_gen_mov_tl(cpu_cc_src2, src2);
357
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
358
    tcg_gen_mov_tl(dst, cpu_cc_dst);
359
}
360

    
361
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
362
{
363
    tcg_gen_mov_tl(cpu_cc_src, src1);
364
    tcg_gen_mov_tl(cpu_cc_src2, src2);
365
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
366
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
367
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
368
    tcg_gen_mov_tl(dst, cpu_cc_dst);
369
}
370

    
371
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
372
{
373
    TCGv r_temp;
374
    TCGv_i32 r_const;
375
    int l1;
376

    
377
    l1 = gen_new_label();
378

    
379
    r_temp = tcg_temp_new();
380
    tcg_gen_xor_tl(r_temp, src1, src2);
381
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
382
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
383
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
384
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
385
    r_const = tcg_const_i32(TT_TOVF);
386
    gen_helper_raise_exception(r_const);
387
    tcg_temp_free_i32(r_const);
388
    gen_set_label(l1);
389
    tcg_temp_free(r_temp);
390
}
391

    
392
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
393
{
394
    tcg_gen_mov_tl(cpu_cc_src, src1);
395
    tcg_gen_movi_tl(cpu_cc_src2, src2);
396
    if (src2 == 0) {
397
        tcg_gen_mov_tl(cpu_cc_dst, src1);
398
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
399
        dc->cc_op = CC_OP_LOGIC;
400
    } else {
401
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
402
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
403
        dc->cc_op = CC_OP_SUB;
404
    }
405
    tcg_gen_mov_tl(dst, cpu_cc_dst);
406
}
407

    
408
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
409
{
410
    tcg_gen_mov_tl(cpu_cc_src, src1);
411
    tcg_gen_mov_tl(cpu_cc_src2, src2);
412
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
413
    tcg_gen_mov_tl(dst, cpu_cc_dst);
414
}
415

    
416
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
417
{
418
    tcg_gen_mov_tl(cpu_cc_src, src1);
419
    tcg_gen_movi_tl(cpu_cc_src2, src2);
420
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
421
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
422
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
423
    tcg_gen_mov_tl(dst, cpu_cc_dst);
424
}
425

    
426
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
427
{
428
    tcg_gen_mov_tl(cpu_cc_src, src1);
429
    tcg_gen_mov_tl(cpu_cc_src2, src2);
430
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
431
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
432
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
433
    tcg_gen_mov_tl(dst, cpu_cc_dst);
434
}
435

    
436
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
437
{
438
    tcg_gen_mov_tl(cpu_cc_src, src1);
439
    tcg_gen_mov_tl(cpu_cc_src2, src2);
440
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
441
    tcg_gen_mov_tl(dst, cpu_cc_dst);
442
}
443

    
444
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
445
{
446
    tcg_gen_mov_tl(cpu_cc_src, src1);
447
    tcg_gen_mov_tl(cpu_cc_src2, src2);
448
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
449
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
450
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
451
    tcg_gen_mov_tl(dst, cpu_cc_dst);
452
}
453

    
454
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
455
{
456
    TCGv r_temp;
457
    int l1;
458

    
459
    l1 = gen_new_label();
460
    r_temp = tcg_temp_new();
461

    
462
    /* old op:
463
    if (!(env->y & 1))
464
        T1 = 0;
465
    */
466
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
467
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
468
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
469
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
470
    tcg_gen_movi_tl(cpu_cc_src2, 0);
471
    gen_set_label(l1);
472

    
473
    // b2 = T0 & 1;
474
    // env->y = (b2 << 31) | (env->y >> 1);
475
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
476
    tcg_gen_shli_tl(r_temp, r_temp, 31);
477
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
478
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
479
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
480
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
481

    
482
    // b1 = N ^ V;
483
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
484
    gen_mov_reg_V(r_temp, cpu_psr);
485
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
486
    tcg_temp_free(r_temp);
487

    
488
    // T0 = (b1 << 31) | (T0 >> 1);
489
    // src1 = T0;
490
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
491
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
492
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
493

    
494
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
495

    
496
    tcg_gen_mov_tl(dst, cpu_cc_dst);
497
}
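/* MULScc performs one step of a 32x32 multiply: if the low bit of %y
   is clear the addend (rs2) is forced to zero, %y is shifted right
   with the low bit of rs1 inserted at the top, rs1 is shifted right
   with (N xor V) as the new sign bit, and the condition codes are set
   from the resulting add. */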
498

    
499
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
500
{
501
    TCGv_i64 r_temp, r_temp2;
502

    
503
    r_temp = tcg_temp_new_i64();
504
    r_temp2 = tcg_temp_new_i64();
505

    
506
    tcg_gen_extu_tl_i64(r_temp, src2);
507
    tcg_gen_extu_tl_i64(r_temp2, src1);
508
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
509

    
510
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
511
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
512
    tcg_temp_free_i64(r_temp);
513
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
514
#ifdef TARGET_SPARC64
515
    tcg_gen_mov_i64(dst, r_temp2);
516
#else
517
    tcg_gen_trunc_i64_tl(dst, r_temp2);
518
#endif
519
    tcg_temp_free_i64(r_temp2);
520
}
521

    
522
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
523
{
524
    TCGv_i64 r_temp, r_temp2;
525

    
526
    r_temp = tcg_temp_new_i64();
527
    r_temp2 = tcg_temp_new_i64();
528

    
529
    tcg_gen_ext_tl_i64(r_temp, src2);
530
    tcg_gen_ext_tl_i64(r_temp2, src1);
531
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
532

    
533
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
534
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
535
    tcg_temp_free_i64(r_temp);
536
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
537
#ifdef TARGET_SPARC64
538
    tcg_gen_mov_i64(dst, r_temp2);
539
#else
540
    tcg_gen_trunc_i64_tl(dst, r_temp2);
541
#endif
542
    tcg_temp_free_i64(r_temp2);
543
}
544

    
545
#ifdef TARGET_SPARC64
546
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
547
{
548
    TCGv_i32 r_const;
549
    int l1;
550

    
551
    l1 = gen_new_label();
552
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
553
    r_const = tcg_const_i32(TT_DIV_ZERO);
554
    gen_helper_raise_exception(r_const);
555
    tcg_temp_free_i32(r_const);
556
    gen_set_label(l1);
557
}
558

    
559
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
560
{
561
    int l1, l2;
562

    
563
    l1 = gen_new_label();
564
    l2 = gen_new_label();
565
    tcg_gen_mov_tl(cpu_cc_src, src1);
566
    tcg_gen_mov_tl(cpu_cc_src2, src2);
567
    gen_trap_ifdivzero_tl(cpu_cc_src2);
568
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
569
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
570
    tcg_gen_movi_i64(dst, INT64_MIN);
571
    tcg_gen_br(l2);
572
    gen_set_label(l1);
573
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
574
    gen_set_label(l2);
575
}
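/* SDIVX traps on division by zero and special-cases INT64_MIN / -1
   (which would overflow the host division) to return INT64_MIN. */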
576
#endif
577

    
578
// 1
579
static inline void gen_op_eval_ba(TCGv dst)
580
{
581
    tcg_gen_movi_tl(dst, 1);
582
}
583

    
584
// Z
585
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
586
{
587
    gen_mov_reg_Z(dst, src);
588
}
589

    
590
// Z | (N ^ V)
591
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
592
{
593
    gen_mov_reg_N(cpu_tmp0, src);
594
    gen_mov_reg_V(dst, src);
595
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
596
    gen_mov_reg_Z(cpu_tmp0, src);
597
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
598
}
599

    
600
// N ^ V
601
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
602
{
603
    gen_mov_reg_V(cpu_tmp0, src);
604
    gen_mov_reg_N(dst, src);
605
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
606
}
607

    
608
// C | Z
609
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
610
{
611
    gen_mov_reg_Z(cpu_tmp0, src);
612
    gen_mov_reg_C(dst, src);
613
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
614
}
615

    
616
// C
617
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
618
{
619
    gen_mov_reg_C(dst, src);
620
}
621

    
622
// V
623
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
624
{
625
    gen_mov_reg_V(dst, src);
626
}
627

    
628
// 0
629
static inline void gen_op_eval_bn(TCGv dst)
630
{
631
    tcg_gen_movi_tl(dst, 0);
632
}
633

    
634
// N
635
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
636
{
637
    gen_mov_reg_N(dst, src);
638
}
639

    
640
// !Z
641
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
642
{
643
    gen_mov_reg_Z(dst, src);
644
    tcg_gen_xori_tl(dst, dst, 0x1);
645
}
646

    
647
// !(Z | (N ^ V))
648
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
649
{
650
    gen_mov_reg_N(cpu_tmp0, src);
651
    gen_mov_reg_V(dst, src);
652
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
653
    gen_mov_reg_Z(cpu_tmp0, src);
654
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
655
    tcg_gen_xori_tl(dst, dst, 0x1);
656
}
657

    
658
// !(N ^ V)
659
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
660
{
661
    gen_mov_reg_V(cpu_tmp0, src);
662
    gen_mov_reg_N(dst, src);
663
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
664
    tcg_gen_xori_tl(dst, dst, 0x1);
665
}
666

    
667
// !(C | Z)
668
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
669
{
670
    gen_mov_reg_Z(cpu_tmp0, src);
671
    gen_mov_reg_C(dst, src);
672
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
673
    tcg_gen_xori_tl(dst, dst, 0x1);
674
}
675

    
676
// !C
677
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
678
{
679
    gen_mov_reg_C(dst, src);
680
    tcg_gen_xori_tl(dst, dst, 0x1);
681
}
682

    
683
// !N
684
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
685
{
686
    gen_mov_reg_N(dst, src);
687
    tcg_gen_xori_tl(dst, dst, 0x1);
688
}
689

    
690
// !V
691
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
692
{
693
    gen_mov_reg_V(dst, src);
694
    tcg_gen_xori_tl(dst, dst, 0x1);
695
}
696

    
697
/*
698
  FSR bit field FCC1 | FCC0:
699
   0 =
700
   1 <
701
   2 >
702
   3 unordered
703
*/
704
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
705
                                    unsigned int fcc_offset)
706
{
707
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
708
    tcg_gen_andi_tl(reg, reg, 0x1);
709
}
710

    
711
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
712
                                    unsigned int fcc_offset)
713
{
714
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
715
    tcg_gen_andi_tl(reg, reg, 0x1);
716
}
717

    
718
// !0: FCC0 | FCC1
719
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
720
                                    unsigned int fcc_offset)
721
{
722
    gen_mov_reg_FCC0(dst, src, fcc_offset);
723
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
724
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
725
}
726

    
727
// 1 or 2: FCC0 ^ FCC1
728
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
729
                                    unsigned int fcc_offset)
730
{
731
    gen_mov_reg_FCC0(dst, src, fcc_offset);
732
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
733
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
734
}
735

    
736
// 1 or 3: FCC0
737
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
738
                                    unsigned int fcc_offset)
739
{
740
    gen_mov_reg_FCC0(dst, src, fcc_offset);
741
}
742

    
743
// 1: FCC0 & !FCC1
744
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
745
                                    unsigned int fcc_offset)
746
{
747
    gen_mov_reg_FCC0(dst, src, fcc_offset);
748
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
749
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
750
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
751
}
752

    
753
// 2 or 3: FCC1
754
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
755
                                    unsigned int fcc_offset)
756
{
757
    gen_mov_reg_FCC1(dst, src, fcc_offset);
758
}
759

    
760
// 2: !FCC0 & FCC1
761
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
762
                                    unsigned int fcc_offset)
763
{
764
    gen_mov_reg_FCC0(dst, src, fcc_offset);
765
    tcg_gen_xori_tl(dst, dst, 0x1);
766
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
767
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
768
}
769

    
770
// 3: FCC0 & FCC1
771
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
772
                                    unsigned int fcc_offset)
773
{
774
    gen_mov_reg_FCC0(dst, src, fcc_offset);
775
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
776
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
777
}
778

    
779
// 0: !(FCC0 | FCC1)
780
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
781
                                    unsigned int fcc_offset)
782
{
783
    gen_mov_reg_FCC0(dst, src, fcc_offset);
784
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
785
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
786
    tcg_gen_xori_tl(dst, dst, 0x1);
787
}
788

    
789
// 0 or 3: !(FCC0 ^ FCC1)
790
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
791
                                    unsigned int fcc_offset)
792
{
793
    gen_mov_reg_FCC0(dst, src, fcc_offset);
794
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
795
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
796
    tcg_gen_xori_tl(dst, dst, 0x1);
797
}
798

    
799
// 0 or 2: !FCC0
800
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
801
                                    unsigned int fcc_offset)
802
{
803
    gen_mov_reg_FCC0(dst, src, fcc_offset);
804
    tcg_gen_xori_tl(dst, dst, 0x1);
805
}
806

    
807
// !1: !(FCC0 & !FCC1)
808
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
809
                                    unsigned int fcc_offset)
810
{
811
    gen_mov_reg_FCC0(dst, src, fcc_offset);
812
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
813
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
814
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
815
    tcg_gen_xori_tl(dst, dst, 0x1);
816
}
817

    
818
// 0 or 1: !FCC1
819
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
820
                                    unsigned int fcc_offset)
821
{
822
    gen_mov_reg_FCC1(dst, src, fcc_offset);
823
    tcg_gen_xori_tl(dst, dst, 0x1);
824
}
825

    
826
// !2: !(!FCC0 & FCC1)
827
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
828
                                    unsigned int fcc_offset)
829
{
830
    gen_mov_reg_FCC0(dst, src, fcc_offset);
831
    tcg_gen_xori_tl(dst, dst, 0x1);
832
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
833
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
834
    tcg_gen_xori_tl(dst, dst, 0x1);
835
}
836

    
837
// !3: !(FCC0 & FCC1)
838
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
839
                                    unsigned int fcc_offset)
840
{
841
    gen_mov_reg_FCC0(dst, src, fcc_offset);
842
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
843
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
844
    tcg_gen_xori_tl(dst, dst, 0x1);
845
}
846

    
847
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
848
                               target_ulong pc2, TCGv r_cond)
849
{
850
    int l1;
851

    
852
    l1 = gen_new_label();
853

    
854
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
855

    
856
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
857

    
858
    gen_set_label(l1);
859
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
860
}
861

    
862
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
863
                                target_ulong pc2, TCGv r_cond)
864
{
865
    int l1;
866

    
867
    l1 = gen_new_label();
868

    
869
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
870

    
871
    gen_goto_tb(dc, 0, pc2, pc1);
872

    
873
    gen_set_label(l1);
874
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
875
}
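/* Branch with the annul bit set: if the condition holds, the delay
   slot at pc2 executes and control continues at the target pc1; if it
   fails, the delay slot is annulled and execution resumes at pc2 + 4. */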
876

    
877
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
878
                                      TCGv r_cond)
879
{
880
    int l1, l2;
881

    
882
    l1 = gen_new_label();
883
    l2 = gen_new_label();
884

    
885
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
886

    
887
    tcg_gen_movi_tl(cpu_npc, npc1);
888
    tcg_gen_br(l2);
889

    
890
    gen_set_label(l1);
891
    tcg_gen_movi_tl(cpu_npc, npc2);
892
    gen_set_label(l2);
893
}
894

    
895
/* call this function before using the condition register as it may
896
   have been set for a jump */
897
static inline void flush_cond(DisasContext *dc, TCGv cond)
898
{
899
    if (dc->npc == JUMP_PC) {
900
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
901
        dc->npc = DYNAMIC_PC;
902
    }
903
}
904

    
905
static inline void save_npc(DisasContext *dc, TCGv cond)
906
{
907
    if (dc->npc == JUMP_PC) {
908
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
909
        dc->npc = DYNAMIC_PC;
910
    } else if (dc->npc != DYNAMIC_PC) {
911
        tcg_gen_movi_tl(cpu_npc, dc->npc);
912
    }
913
}
914

    
915
static inline void save_state(DisasContext *dc, TCGv cond)
916
{
917
    tcg_gen_movi_tl(cpu_pc, dc->pc);
918
    /* flush pending conditional evaluations before exposing cpu state */
919
    if (dc->cc_op != CC_OP_FLAGS) {
920
        dc->cc_op = CC_OP_FLAGS;
921
        gen_helper_compute_psr();
922
    }
923
    save_npc(dc, cond);
924
}
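/* Condition codes are evaluated lazily: arithmetic ops only record
   cc_src/cc_src2/cc_dst and a CC_OP value, and the architectural PSR
   flags are materialized by gen_helper_compute_psr() only when the
   state must be exposed (traps, conditional branches, state saves). */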
925

    
926
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
927
{
928
    if (dc->npc == JUMP_PC) {
929
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
930
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
931
        dc->pc = DYNAMIC_PC;
932
    } else if (dc->npc == DYNAMIC_PC) {
933
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
934
        dc->pc = DYNAMIC_PC;
935
    } else {
936
        dc->pc = dc->npc;
937
    }
938
}
939

    
940
static inline void gen_op_next_insn(void)
941
{
942
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
943
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
944
}
945

    
946
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
947
                            DisasContext *dc)
948
{
949
    TCGv_i32 r_src;
950

    
951
#ifdef TARGET_SPARC64
952
    if (cc)
953
        r_src = cpu_xcc;
954
    else
955
        r_src = cpu_psr;
956
#else
957
    r_src = cpu_psr;
958
#endif
959
    switch (dc->cc_op) {
960
    case CC_OP_FLAGS:
961
        break;
962
    default:
963
        gen_helper_compute_psr();
964
        dc->cc_op = CC_OP_FLAGS;
965
        break;
966
    }
967
    switch (cond) {
968
    case 0x0:
969
        gen_op_eval_bn(r_dst);
970
        break;
971
    case 0x1:
972
        gen_op_eval_be(r_dst, r_src);
973
        break;
974
    case 0x2:
975
        gen_op_eval_ble(r_dst, r_src);
976
        break;
977
    case 0x3:
978
        gen_op_eval_bl(r_dst, r_src);
979
        break;
980
    case 0x4:
981
        gen_op_eval_bleu(r_dst, r_src);
982
        break;
983
    case 0x5:
984
        gen_op_eval_bcs(r_dst, r_src);
985
        break;
986
    case 0x6:
987
        gen_op_eval_bneg(r_dst, r_src);
988
        break;
989
    case 0x7:
990
        gen_op_eval_bvs(r_dst, r_src);
991
        break;
992
    case 0x8:
993
        gen_op_eval_ba(r_dst);
994
        break;
995
    case 0x9:
996
        gen_op_eval_bne(r_dst, r_src);
997
        break;
998
    case 0xa:
999
        gen_op_eval_bg(r_dst, r_src);
1000
        break;
1001
    case 0xb:
1002
        gen_op_eval_bge(r_dst, r_src);
1003
        break;
1004
    case 0xc:
1005
        gen_op_eval_bgu(r_dst, r_src);
1006
        break;
1007
    case 0xd:
1008
        gen_op_eval_bcc(r_dst, r_src);
1009
        break;
1010
    case 0xe:
1011
        gen_op_eval_bpos(r_dst, r_src);
1012
        break;
1013
    case 0xf:
1014
        gen_op_eval_bvc(r_dst, r_src);
1015
        break;
1016
    }
1017
}
1018

    
1019
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1020
{
1021
    unsigned int offset;
1022

    
1023
    switch (cc) {
1024
    default:
1025
    case 0x0:
1026
        offset = 0;
1027
        break;
1028
    case 0x1:
1029
        offset = 32 - 10;
1030
        break;
1031
    case 0x2:
1032
        offset = 34 - 10;
1033
        break;
1034
    case 0x3:
1035
        offset = 36 - 10;
1036
        break;
1037
    }
1038

    
1039
    switch (cond) {
1040
    case 0x0:
1041
        gen_op_eval_bn(r_dst);
1042
        break;
1043
    case 0x1:
1044
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1045
        break;
1046
    case 0x2:
1047
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1048
        break;
1049
    case 0x3:
1050
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1051
        break;
1052
    case 0x4:
1053
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1054
        break;
1055
    case 0x5:
1056
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1057
        break;
1058
    case 0x6:
1059
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1060
        break;
1061
    case 0x7:
1062
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1063
        break;
1064
    case 0x8:
1065
        gen_op_eval_ba(r_dst);
1066
        break;
1067
    case 0x9:
1068
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1069
        break;
1070
    case 0xa:
1071
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1072
        break;
1073
    case 0xb:
1074
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1075
        break;
1076
    case 0xc:
1077
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1078
        break;
1079
    case 0xd:
1080
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1081
        break;
1082
    case 0xe:
1083
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1084
        break;
1085
    case 0xf:
1086
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1087
        break;
1088
    }
1089
}
1090

    
1091
#ifdef TARGET_SPARC64
1092
// Inverted logic
1093
static const int gen_tcg_cond_reg[8] = {
1094
    -1,
1095
    TCG_COND_NE,
1096
    TCG_COND_GT,
1097
    TCG_COND_GE,
1098
    -1,
1099
    TCG_COND_EQ,
1100
    TCG_COND_LE,
1101
    TCG_COND_LT,
1102
};
1103

    
1104
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1105
{
1106
    int l1;
1107

    
1108
    l1 = gen_new_label();
1109
    tcg_gen_movi_tl(r_dst, 0);
1110
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1111
    tcg_gen_movi_tl(r_dst, 1);
1112
    gen_set_label(l1);
1113
}
1114
#endif
1115

    
1116
/* XXX: potentially incorrect if dynamic npc */
1117
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1118
                      TCGv r_cond)
1119
{
1120
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1121
    target_ulong target = dc->pc + offset;
1122

    
1123
    if (cond == 0x0) {
1124
        /* unconditional not taken */
1125
        if (a) {
1126
            dc->pc = dc->npc + 4;
1127
            dc->npc = dc->pc + 4;
1128
        } else {
1129
            dc->pc = dc->npc;
1130
            dc->npc = dc->pc + 4;
1131
        }
1132
    } else if (cond == 0x8) {
1133
        /* unconditional taken */
1134
        if (a) {
1135
            dc->pc = target;
1136
            dc->npc = dc->pc + 4;
1137
        } else {
1138
            dc->pc = dc->npc;
1139
            dc->npc = target;
1140
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1141
        }
1142
    } else {
1143
        flush_cond(dc, r_cond);
1144
        gen_cond(r_cond, cc, cond, dc);
1145
        if (a) {
1146
            gen_branch_a(dc, target, dc->npc, r_cond);
1147
            dc->is_br = 1;
1148
        } else {
1149
            dc->pc = dc->npc;
1150
            dc->jump_pc[0] = target;
1151
            dc->jump_pc[1] = dc->npc + 4;
1152
            dc->npc = JUMP_PC;
1153
        }
1154
    }
1155
}
1156

    
1157
/* XXX: potentially incorrect if dynamic npc */
1158
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1159
                      TCGv r_cond)
1160
{
1161
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1162
    target_ulong target = dc->pc + offset;
1163

    
1164
    if (cond == 0x0) {
1165
        /* unconditional not taken */
1166
        if (a) {
1167
            dc->pc = dc->npc + 4;
1168
            dc->npc = dc->pc + 4;
1169
        } else {
1170
            dc->pc = dc->npc;
1171
            dc->npc = dc->pc + 4;
1172
        }
1173
    } else if (cond == 0x8) {
1174
        /* unconditional taken */
1175
        if (a) {
1176
            dc->pc = target;
1177
            dc->npc = dc->pc + 4;
1178
        } else {
1179
            dc->pc = dc->npc;
1180
            dc->npc = target;
1181
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1182
        }
1183
    } else {
1184
        flush_cond(dc, r_cond);
1185
        gen_fcond(r_cond, cc, cond);
1186
        if (a) {
1187
            gen_branch_a(dc, target, dc->npc, r_cond);
1188
            dc->is_br = 1;
1189
        } else {
1190
            dc->pc = dc->npc;
1191
            dc->jump_pc[0] = target;
1192
            dc->jump_pc[1] = dc->npc + 4;
1193
            dc->npc = JUMP_PC;
1194
        }
1195
    }
1196
}
1197

    
1198
#ifdef TARGET_SPARC64
1199
/* XXX: potentially incorrect if dynamic npc */
1200
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1201
                          TCGv r_cond, TCGv r_reg)
1202
{
1203
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1204
    target_ulong target = dc->pc + offset;
1205

    
1206
    flush_cond(dc, r_cond);
1207
    gen_cond_reg(r_cond, cond, r_reg);
1208
    if (a) {
1209
        gen_branch_a(dc, target, dc->npc, r_cond);
1210
        dc->is_br = 1;
1211
    } else {
1212
        dc->pc = dc->npc;
1213
        dc->jump_pc[0] = target;
1214
        dc->jump_pc[1] = dc->npc + 4;
1215
        dc->npc = JUMP_PC;
1216
    }
1217
}
1218

    
1219
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1220
{
1221
    switch (fccno) {
1222
    case 0:
1223
        gen_helper_fcmps(r_rs1, r_rs2);
1224
        break;
1225
    case 1:
1226
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1227
        break;
1228
    case 2:
1229
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1230
        break;
1231
    case 3:
1232
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1233
        break;
1234
    }
1235
}
1236

    
1237
static inline void gen_op_fcmpd(int fccno)
1238
{
1239
    switch (fccno) {
1240
    case 0:
1241
        gen_helper_fcmpd();
1242
        break;
1243
    case 1:
1244
        gen_helper_fcmpd_fcc1();
1245
        break;
1246
    case 2:
1247
        gen_helper_fcmpd_fcc2();
1248
        break;
1249
    case 3:
1250
        gen_helper_fcmpd_fcc3();
1251
        break;
1252
    }
1253
}
1254

    
1255
static inline void gen_op_fcmpq(int fccno)
1256
{
1257
    switch (fccno) {
1258
    case 0:
1259
        gen_helper_fcmpq();
1260
        break;
1261
    case 1:
1262
        gen_helper_fcmpq_fcc1();
1263
        break;
1264
    case 2:
1265
        gen_helper_fcmpq_fcc2();
1266
        break;
1267
    case 3:
1268
        gen_helper_fcmpq_fcc3();
1269
        break;
1270
    }
1271
}
1272

    
1273
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1274
{
1275
    switch (fccno) {
1276
    case 0:
1277
        gen_helper_fcmpes(r_rs1, r_rs2);
1278
        break;
1279
    case 1:
1280
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1281
        break;
1282
    case 2:
1283
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1284
        break;
1285
    case 3:
1286
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1287
        break;
1288
    }
1289
}
1290

    
1291
static inline void gen_op_fcmped(int fccno)
1292
{
1293
    switch (fccno) {
1294
    case 0:
1295
        gen_helper_fcmped();
1296
        break;
1297
    case 1:
1298
        gen_helper_fcmped_fcc1();
1299
        break;
1300
    case 2:
1301
        gen_helper_fcmped_fcc2();
1302
        break;
1303
    case 3:
1304
        gen_helper_fcmped_fcc3();
1305
        break;
1306
    }
1307
}
1308

    
1309
static inline void gen_op_fcmpeq(int fccno)
1310
{
1311
    switch (fccno) {
1312
    case 0:
1313
        gen_helper_fcmpeq();
1314
        break;
1315
    case 1:
1316
        gen_helper_fcmpeq_fcc1();
1317
        break;
1318
    case 2:
1319
        gen_helper_fcmpeq_fcc2();
1320
        break;
1321
    case 3:
1322
        gen_helper_fcmpeq_fcc3();
1323
        break;
1324
    }
1325
}
1326

    
1327
#else
1328

    
1329
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1330
{
1331
    gen_helper_fcmps(r_rs1, r_rs2);
1332
}
1333

    
1334
static inline void gen_op_fcmpd(int fccno)
1335
{
1336
    gen_helper_fcmpd();
1337
}
1338

    
1339
static inline void gen_op_fcmpq(int fccno)
1340
{
1341
    gen_helper_fcmpq();
1342
}
1343

    
1344
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1345
{
1346
    gen_helper_fcmpes(r_rs1, r_rs2);
1347
}
1348

    
1349
static inline void gen_op_fcmped(int fccno)
1350
{
1351
    gen_helper_fcmped();
1352
}
1353

    
1354
static inline void gen_op_fcmpeq(int fccno)
1355
{
1356
    gen_helper_fcmpeq();
1357
}
1358
#endif
1359

    
1360
static inline void gen_op_fpexception_im(int fsr_flags)
1361
{
1362
    TCGv_i32 r_const;
1363

    
1364
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1365
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1366
    r_const = tcg_const_i32(TT_FP_EXCP);
1367
    gen_helper_raise_exception(r_const);
1368
    tcg_temp_free_i32(r_const);
1369
}
1370

    
1371
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1372
{
1373
#if !defined(CONFIG_USER_ONLY)
1374
    if (!dc->fpu_enabled) {
1375
        TCGv_i32 r_const;
1376

    
1377
        save_state(dc, r_cond);
1378
        r_const = tcg_const_i32(TT_NFPU_INSN);
1379
        gen_helper_raise_exception(r_const);
1380
        tcg_temp_free_i32(r_const);
1381
        dc->is_br = 1;
1382
        return 1;
1383
    }
1384
#endif
1385
    return 0;
1386
}
1387

    
1388
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1389
{
1390
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1391
}
1392

    
1393
static inline void gen_clear_float_exceptions(void)
1394
{
1395
    gen_helper_clear_float_exceptions();
1396
}
1397

    
1398
/* asi moves */
1399
#ifdef TARGET_SPARC64
1400
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1401
{
1402
    int asi;
1403
    TCGv_i32 r_asi;
1404

    
1405
    if (IS_IMM) {
1406
        r_asi = tcg_temp_new_i32();
1407
        tcg_gen_mov_i32(r_asi, cpu_asi);
1408
    } else {
1409
        asi = GET_FIELD(insn, 19, 26);
1410
        r_asi = tcg_const_i32(asi);
1411
    }
1412
    return r_asi;
1413
}
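/* With i = 1 (immediate offset form) the ASI comes from the current
   %asi register; with i = 0 it is the 8-bit imm_asi field of the
   instruction (bits 12:5). */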
1414

    
1415
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1416
                              int sign)
1417
{
1418
    TCGv_i32 r_asi, r_size, r_sign;
1419

    
1420
    r_asi = gen_get_asi(insn, addr);
1421
    r_size = tcg_const_i32(size);
1422
    r_sign = tcg_const_i32(sign);
1423
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1424
    tcg_temp_free_i32(r_sign);
1425
    tcg_temp_free_i32(r_size);
1426
    tcg_temp_free_i32(r_asi);
1427
}
1428

    
1429
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1430
{
1431
    TCGv_i32 r_asi, r_size;
1432

    
1433
    r_asi = gen_get_asi(insn, addr);
1434
    r_size = tcg_const_i32(size);
1435
    gen_helper_st_asi(addr, src, r_asi, r_size);
1436
    tcg_temp_free_i32(r_size);
1437
    tcg_temp_free_i32(r_asi);
1438
}
1439

    
1440
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1441
{
1442
    TCGv_i32 r_asi, r_size, r_rd;
1443

    
1444
    r_asi = gen_get_asi(insn, addr);
1445
    r_size = tcg_const_i32(size);
1446
    r_rd = tcg_const_i32(rd);
1447
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1448
    tcg_temp_free_i32(r_rd);
1449
    tcg_temp_free_i32(r_size);
1450
    tcg_temp_free_i32(r_asi);
1451
}
1452

    
1453
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1454
{
1455
    TCGv_i32 r_asi, r_size, r_rd;
1456

    
1457
    r_asi = gen_get_asi(insn, addr);
1458
    r_size = tcg_const_i32(size);
1459
    r_rd = tcg_const_i32(rd);
1460
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1461
    tcg_temp_free_i32(r_rd);
1462
    tcg_temp_free_i32(r_size);
1463
    tcg_temp_free_i32(r_asi);
1464
}
1465

    
1466
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1467
{
1468
    TCGv_i32 r_asi, r_size, r_sign;
1469

    
1470
    r_asi = gen_get_asi(insn, addr);
1471
    r_size = tcg_const_i32(4);
1472
    r_sign = tcg_const_i32(0);
1473
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1474
    tcg_temp_free_i32(r_sign);
1475
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1476
    tcg_temp_free_i32(r_size);
1477
    tcg_temp_free_i32(r_asi);
1478
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1479
}
1480

    
1481
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1482
{
1483
    TCGv_i32 r_asi, r_rd;
1484

    
1485
    r_asi = gen_get_asi(insn, addr);
1486
    r_rd = tcg_const_i32(rd);
1487
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1488
    tcg_temp_free_i32(r_rd);
1489
    tcg_temp_free_i32(r_asi);
1490
}
1491

    
1492
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1493
{
1494
    TCGv_i32 r_asi, r_size;
1495

    
1496
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1497
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1498
    r_asi = gen_get_asi(insn, addr);
1499
    r_size = tcg_const_i32(8);
1500
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1501
    tcg_temp_free_i32(r_size);
1502
    tcg_temp_free_i32(r_asi);
1503
}
1504

    
1505
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1506
                               int rd)
1507
{
1508
    TCGv r_val1;
1509
    TCGv_i32 r_asi;
1510

    
1511
    r_val1 = tcg_temp_new();
1512
    gen_movl_reg_TN(rd, r_val1);
1513
    r_asi = gen_get_asi(insn, addr);
1514
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1515
    tcg_temp_free_i32(r_asi);
1516
    tcg_temp_free(r_val1);
1517
}
1518

    
1519
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1520
                                int rd)
1521
{
1522
    TCGv_i32 r_asi;
1523

    
1524
    gen_movl_reg_TN(rd, cpu_tmp64);
1525
    r_asi = gen_get_asi(insn, addr);
1526
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1527
    tcg_temp_free_i32(r_asi);
1528
}
1529

    
1530
#elif !defined(CONFIG_USER_ONLY)
1531

    
1532
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1533
                              int sign)
1534
{
1535
    TCGv_i32 r_asi, r_size, r_sign;
1536

    
1537
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1538
    r_size = tcg_const_i32(size);
1539
    r_sign = tcg_const_i32(sign);
1540
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1541
    tcg_temp_free(r_sign);
1542
    tcg_temp_free(r_size);
1543
    tcg_temp_free(r_asi);
1544
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1545
}
1546

    
1547
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1548
{
1549
    TCGv_i32 r_asi, r_size;
1550

    
1551
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1552
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1553
    r_size = tcg_const_i32(size);
1554
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1555
    tcg_temp_free(r_size);
1556
    tcg_temp_free(r_asi);
1557
}
1558

    
1559
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1560
{
1561
    TCGv_i32 r_asi, r_size, r_sign;
1562
    TCGv_i64 r_val;
1563

    
1564
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1565
    r_size = tcg_const_i32(4);
1566
    r_sign = tcg_const_i32(0);
1567
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1568
    tcg_temp_free(r_sign);
1569
    r_val = tcg_temp_new_i64();
1570
    tcg_gen_extu_tl_i64(r_val, dst);
1571
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1572
    tcg_temp_free_i64(r_val);
1573
    tcg_temp_free(r_size);
1574
    tcg_temp_free(r_asi);
1575
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1576
}
1577

    
1578
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1579
{
1580
    TCGv_i32 r_asi, r_size, r_sign;
1581

    
1582
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1583
    r_size = tcg_const_i32(8);
1584
    r_sign = tcg_const_i32(0);
1585
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1586
    tcg_temp_free(r_sign);
1587
    tcg_temp_free(r_size);
1588
    tcg_temp_free(r_asi);
1589
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1590
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1591
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1592
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1593
    gen_movl_TN_reg(rd, hi);
1594
}
1595

    
1596
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1597
{
1598
    TCGv_i32 r_asi, r_size;
1599

    
1600
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1601
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1602
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1603
    r_size = tcg_const_i32(8);
1604
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1605
    tcg_temp_free(r_size);
1606
    tcg_temp_free(r_asi);
1607
}
1608
#endif
1609

    
1610
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1611
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1612
{
1613
    TCGv_i64 r_val;
1614
    TCGv_i32 r_asi, r_size;
1615

    
1616
    gen_ld_asi(dst, addr, insn, 1, 0);
1617

    
1618
    r_val = tcg_const_i64(0xffULL);
1619
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1620
    r_size = tcg_const_i32(1);
1621
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1622
    tcg_temp_free_i32(r_size);
1623
    tcg_temp_free_i32(r_asi);
1624
    tcg_temp_free_i64(r_val);
1625
}
1626
#endif
1627

    
1628
static inline TCGv get_src1(unsigned int insn, TCGv def)
1629
{
1630
    TCGv r_rs1 = def;
1631
    unsigned int rs1;
1632

    
1633
    rs1 = GET_FIELD(insn, 13, 17);
1634
    if (rs1 == 0) {
1635
        tcg_gen_movi_tl(def, 0);
1636
    } else if (rs1 < 8) {
1637
        r_rs1 = cpu_gregs[rs1];
1638
    } else {
1639
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1640
    }
1641
    return r_rs1;
1642
}
1643

    
1644
static inline TCGv get_src2(unsigned int insn, TCGv def)
1645
{
1646
    TCGv r_rs2 = def;
1647

    
1648
    if (IS_IMM) { /* immediate */
1649
        target_long simm = GET_FIELDs(insn, 19, 31);
1650
        tcg_gen_movi_tl(def, simm);
1651
    } else { /* register */
1652
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
1653
        if (rs2 == 0) {
1654
            tcg_gen_movi_tl(def, 0);
1655
        } else if (rs2 < 8) {
1656
            r_rs2 = cpu_gregs[rs2];
1657
        } else {
1658
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1659
        }
1660
    }
1661
    return r_rs2;
1662
}
1663

    
1664
#ifdef TARGET_SPARC64
1665
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1666
{
1667
    TCGv_i32 r_tl = tcg_temp_new_i32();
1668

    
1669
    /* load env->tl into r_tl */
1670
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1671

    
1672
    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 */
1673
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1674

    
1675
    /* calculate offset to current trap state from env->ts, reuse r_tl */
1676
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1677
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1678

    
1679
    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1680
    {
1681
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1682
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1683
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1684
        tcg_temp_free_ptr(r_tl_tmp);
1685
    }
1686

    
1687
    tcg_temp_free_i32(r_tl);
1688
}
1689
#endif
1690

    
1691
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
1692
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1693
        goto illegal_insn;
1694
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1695
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1696
        goto nfpu_insn;
1697

    
1698
/* before an instruction, dc->pc must be static */
1699
static void disas_sparc_insn(DisasContext * dc)
1700
{
1701
    unsigned int insn, opc, rs1, rs2, rd;
1702
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1703
    target_long simm;
1704

    
1705
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1706
        tcg_gen_debug_insn_start(dc->pc);
1707
    insn = ldl_code(dc->pc);
1708
    opc = GET_FIELD(insn, 0, 1);
1709

    
1710
    rd = GET_FIELD(insn, 2, 6);
1711

    
1712
    cpu_tmp1 = cpu_src1 = tcg_temp_new();
1713
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
1714

    
1715
    switch (opc) {
1716
    case 0:                     /* branches/sethi */
1717
        {
1718
            unsigned int xop = GET_FIELD(insn, 7, 9);
1719
            int32_t target;
1720
            switch (xop) {
1721
#ifdef TARGET_SPARC64
1722
            case 0x1:           /* V9 BPcc */
1723
                {
1724
                    int cc;
1725

    
1726
                    target = GET_FIELD_SP(insn, 0, 18);
1727
                    target = sign_extend(target, 19);
1728
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
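            /* SETHI places imm22 << 10 in bits 31:10 of rd and clears the
               low ten bits; sethi 0, %g0 is the canonical nop, so nothing
               is generated when rd is %g0. */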
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
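    /* CALL: the signed 30-bit word displacement is relative to the CALL
       itself, whose address is written to r15 (%o7); only npc is redirected
       so that the delay slot instruction still runs first. */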
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
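            /* xop 0x3a is Ticc: the trap number is rs1 plus rs2 (or the
               immediate), masked with V8_TRAP_MASK (UA2005_HTRAP_MASK on
               hypervisor-capable CPUs in privileged mode) and offset by
               TT_TRAP.  cond 0x8 always traps; other non-zero values test
               the integer condition codes. */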
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
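                /* xop 0x28 reads %y or, on SPARC-V9, the ancillary state
                   register selected by rs1 (ccr, asi, tick, pc, fprs, ...). */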
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
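                /* FPop1: the opf field (insn bits 13:5) selects the operation.
                   Single-precision operands live directly in cpu_fpr[];
                   double and quad operands are staged through DT0/DT1 and
                   QT0/QT1 by the gen_op_load_fpr_* and gen_op_store_*_fpr
                   helpers. */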
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
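                /* FPop2: conditional FP moves (FMOVcc on %fccN, %icc or %xcc
                   and FMOVr on an integer register condition) and the FP
                   compares; rd & 3 selects the target %fcc field on V9. */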
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
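                /* Plain "or" (no cc update) is special-cased: with rs1 == %g0
                   it degenerates to a register or immediate move, so no OR is
                   emitted at all in that case. */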
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
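                /* For the V9 shifts, bit 12 of the instruction selects the
                   64-bit x-form (6-bit shift count); otherwise the count is
                   masked to 5 bits and, for srl/sra, the source is first
                   truncated or sign-extended to 32 bits. */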
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
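                    /* xop values below 0x20 are the basic ALU operations;
                       bit 4 of xop selects the cc-setting variant, which
                       either calls a gen_op_*_cc helper or records the
                       result in cpu_cc_dst and sets CC_OP_LOGIC for lazy
                       flag evaluation. */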
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
2937
                        if (IS_IMM) {
2938
                            simm = GET_FIELDs(insn, 19, 31);
2939
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2940
                        } else {
2941
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2942
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2943
                        }
2944
                        if (xop & 0x10) {
2945
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2946
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2947
                            dc->cc_op = CC_OP_LOGIC;
2948
                        }
2949
                        break;
2950
                    case 0x8: /* addx, V9 addc */
2951
                        if (IS_IMM) {
2952
                            simm = GET_FIELDs(insn, 19, 31);
2953
                            if (xop & 0x10) {
2954
                                gen_helper_compute_psr();
2955
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2956
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2957
                                dc->cc_op = CC_OP_ADDX;
2958
                            } else {
2959
                                gen_helper_compute_psr();
2960
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
2961
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2962
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2963
                            }
2964
                        } else {
2965
                            if (xop & 0x10) {
2966
                                gen_helper_compute_psr();
2967
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2968
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2969
                                dc->cc_op = CC_OP_ADDX;
2970
                            } else {
2971
                                gen_helper_compute_psr();
2972
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
2973
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2974
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2975
                            }
2976
                        }
2977
                        break;
2978
#ifdef TARGET_SPARC64
2979
                    case 0x9: /* V9 mulx */
2980
                        if (IS_IMM) {
2981
                            simm = GET_FIELDs(insn, 19, 31);
2982
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2983
                        } else {
2984
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2985
                        }
2986
                        break;
2987
#endif
2988
                    case 0xa: /* umul */
2989
                        CHECK_IU_FEATURE(dc, MUL);
2990
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2991
                        if (xop & 0x10) {
2992
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2993
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2994
                            dc->cc_op = CC_OP_LOGIC;
2995
                        }
2996
                        break;
2997
                    case 0xb: /* smul */
2998
                        CHECK_IU_FEATURE(dc, MUL);
2999
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3000
                        if (xop & 0x10) {
3001
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3002
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3003
                            dc->cc_op = CC_OP_LOGIC;
3004
                        }
3005
                        break;
3006
                    case 0xc: /* subx, V9 subc */
3007
                        if (IS_IMM) {
3008
                            simm = GET_FIELDs(insn, 19, 31);
3009
                            if (xop & 0x10) {
3010
                                gen_helper_compute_psr();
3011
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3012
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3013
                                dc->cc_op = CC_OP_SUBX;
3014
                            } else {
3015
                                gen_helper_compute_psr();
3016
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3017
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3018
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3019
                            }
3020
                        } else {
3021
                            if (xop & 0x10) {
3022
                                gen_helper_compute_psr();
3023
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3024
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3025
                                dc->cc_op = CC_OP_SUBX;
3026
                            } else {
3027
                                gen_helper_compute_psr();
3028
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3029
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3030
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3031
                            }
3032
                        }
3033
                        break;
3034
#ifdef TARGET_SPARC64
3035
                    case 0xd: /* V9 udivx */
3036
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3037
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3038
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
3039
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3040
                        break;
3041
#endif
3042
                    case 0xe: /* udiv */
3043
                        CHECK_IU_FEATURE(dc, DIV);
3044
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3045
                        if (xop & 0x10) {
3046
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3047
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3048
                            dc->cc_op = CC_OP_DIV;
3049
                        }
3050
                        break;
3051
                    case 0xf: /* sdiv */
3052
                        CHECK_IU_FEATURE(dc, DIV);
3053
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3054
                        if (xop & 0x10) {
3055
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3056
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3057
                            dc->cc_op = CC_OP_DIV;
3058
                        }
3059
                        break;
3060
                    default:
3061
                        goto illegal_insn;
3062
                    }
3063
                    gen_movl_TN_reg(rd, cpu_dst);
3064
                } else {
3065
                    cpu_src1 = get_src1(insn, cpu_src1);
3066
                    cpu_src2 = get_src2(insn, cpu_src2);
3067
                    switch (xop) {
3068
                    case 0x20: /* taddcc */
3069
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3070
                        gen_movl_TN_reg(rd, cpu_dst);
3071
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3072
                        dc->cc_op = CC_OP_TADD;
3073
                        break;
3074
                    case 0x21: /* tsubcc */
3075
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3076
                        gen_movl_TN_reg(rd, cpu_dst);
3077
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3078
                        dc->cc_op = CC_OP_TSUB;
3079
                        break;
3080
                    case 0x22: /* taddcctv */
3081
                        save_state(dc, cpu_cond);
3082
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3083
                        gen_movl_TN_reg(rd, cpu_dst);
3084
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3085
                        dc->cc_op = CC_OP_TADDTV;
3086
                        break;
3087
                    case 0x23: /* tsubcctv */
3088
                        save_state(dc, cpu_cond);
3089
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3090
                        gen_movl_TN_reg(rd, cpu_dst);
3091
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3092
                        dc->cc_op = CC_OP_TSUBTV;
3093
                        break;
3094
                    case 0x24: /* mulscc */
3095
                        gen_helper_compute_psr();
3096
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3097
                        gen_movl_TN_reg(rd, cpu_dst);
3098
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3099
                        dc->cc_op = CC_OP_ADD;
3100
                        break;
3101
#ifndef TARGET_SPARC64
3102
                    case 0x25:  /* sll */
3103
                        if (IS_IMM) { /* immediate */
3104
                            simm = GET_FIELDs(insn, 20, 31);
3105
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3106
                        } else { /* register */
3107
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3108
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3109
                        }
3110
                        gen_movl_TN_reg(rd, cpu_dst);
3111
                        break;
3112
                    case 0x26:  /* srl */
3113
                        if (IS_IMM) { /* immediate */
3114
                            simm = GET_FIELDs(insn, 20, 31);
3115
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3116
                        } else { /* register */
3117
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3118
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3119
                        }
3120
                        gen_movl_TN_reg(rd, cpu_dst);
3121
                        break;
3122
                    case 0x27:  /* sra */
3123
                        if (IS_IMM) { /* immediate */
3124
                            simm = GET_FIELDs(insn, 20, 31);
3125
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3126
                        } else { /* register */
3127
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3128
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3129
                        }
3130
                        gen_movl_TN_reg(rd, cpu_dst);
3131
                        break;
3132
#endif
3133
                    case 0x30:
3134
                        {
3135
                            switch(rd) {
3136
                            case 0: /* wry */
3137
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3138
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3139
                                break;
3140
#ifndef TARGET_SPARC64
3141
                            case 0x01 ... 0x0f: /* undefined in the
3142
                                                   SPARCv8 manual, nop
3143
                                                   on the microSPARC
3144
                                                   II */
3145
                            case 0x10 ... 0x1f: /* implementation-dependent
3146
                                                   in the SPARCv8
3147
                                                   manual, nop on the
3148
                                                   microSPARC II */
3149
                                break;
3150
#else
3151
                            case 0x2: /* V9 wrccr */
3152
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3153
                                gen_helper_wrccr(cpu_dst);
3154
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3155
                                dc->cc_op = CC_OP_FLAGS;
3156
                                break;
3157
                            case 0x3: /* V9 wrasi */
3158
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3159
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3160
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3161
                                break;
3162
                            case 0x6: /* V9 wrfprs */
3163
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3164
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3165
                                save_state(dc, cpu_cond);
3166
                                gen_op_next_insn();
3167
                                tcg_gen_exit_tb(0);
3168
                                dc->is_br = 1;
3169
                                break;
3170
                            case 0xf: /* V9 sir, nop if user */
3171
#if !defined(CONFIG_USER_ONLY)
3172
                                if (supervisor(dc)) {
3173
                                    ; // XXX
3174
                                }
3175
#endif
3176
                                break;
3177
                            case 0x13: /* Graphics Status */
3178
                                if (gen_trap_ifnofpu(dc, cpu_cond))
3179
                                    goto jmp_insn;
3180
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3181
                                break;
3182
                            case 0x14: /* Softint set */
3183
                                if (!supervisor(dc))
3184
                                    goto illegal_insn;
3185
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3186
                                gen_helper_set_softint(cpu_tmp64);
3187
                                break;
3188
                            case 0x15: /* Softint clear */
3189
                                if (!supervisor(dc))
3190
                                    goto illegal_insn;
3191
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3192
                                gen_helper_clear_softint(cpu_tmp64);
3193
                                break;
3194
                            case 0x16: /* Softint write */
3195
                                if (!supervisor(dc))
3196
                                    goto illegal_insn;
3197
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3198
                                gen_helper_write_softint(cpu_tmp64);
3199
                                break;
3200
                            case 0x17: /* Tick compare */
3201
#if !defined(CONFIG_USER_ONLY)
3202
                                if (!supervisor(dc))
3203
                                    goto illegal_insn;
3204
#endif
3205
                                {
3206
                                    TCGv_ptr r_tickptr;
3207

    
3208
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3209
                                                   cpu_src2);
3210
                                    r_tickptr = tcg_temp_new_ptr();
3211
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3212
                                                   offsetof(CPUState, tick));
3213
                                    gen_helper_tick_set_limit(r_tickptr,
3214
                                                              cpu_tick_cmpr);
3215
                                    tcg_temp_free_ptr(r_tickptr);
3216
                                }
3217
                                break;
3218
                            case 0x18: /* System tick */
3219
#if !defined(CONFIG_USER_ONLY)
3220
                                if (!supervisor(dc))
3221
                                    goto illegal_insn;
3222
#endif
3223
                                {
3224
                                    TCGv_ptr r_tickptr;
3225

    
3226
                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
3227
                                                   cpu_src2);
3228
                                    r_tickptr = tcg_temp_new_ptr();
3229
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3230
                                                   offsetof(CPUState, stick));
3231
                                    gen_helper_tick_set_count(r_tickptr,
3232
                                                              cpu_dst);
3233
                                    tcg_temp_free_ptr(r_tickptr);
3234
                                }
3235
                                break;
3236
                            case 0x19: /* System tick compare */
3237
#if !defined(CONFIG_USER_ONLY)
3238
                                if (!supervisor(dc))
3239
                                    goto illegal_insn;
3240
#endif
3241
                                {
3242
                                    TCGv_ptr r_tickptr;
3243

    
3244
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3245
                                                   cpu_src2);
3246
                                    r_tickptr = tcg_temp_new_ptr();
3247
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3248
                                                   offsetof(CPUState, stick));
3249
                                    gen_helper_tick_set_limit(r_tickptr,
3250
                                                              cpu_stick_cmpr);
3251
                                    tcg_temp_free_ptr(r_tickptr);
3252
                                }
3253
                                break;
3254

    
3255
                            case 0x10: /* Performance Control */
3256
                            case 0x11: /* Performance Instrumentation
3257
                                          Counter */
3258
                            case 0x12: /* Dispatch Control */
3259
#endif
3260
                            default:
3261
                                goto illegal_insn;
3262
                            }
3263
                        }
3264
                        break;
3265
#if !defined(CONFIG_USER_ONLY)
3266
                    case 0x31: /* wrpsr, V9 saved, restored */
3267
                        {
3268
                            if (!supervisor(dc))
3269
                                goto priv_insn;
3270
#ifdef TARGET_SPARC64
3271
                            switch (rd) {
3272
                            case 0:
3273
                                gen_helper_saved();
3274
                                break;
3275
                            case 1:
3276
                                gen_helper_restored();
3277
                                break;
3278
                            case 2: /* UA2005 allclean */
3279
                            case 3: /* UA2005 otherw */
3280
                            case 4: /* UA2005 normalw */
3281
                            case 5: /* UA2005 invalw */
3282
                                // XXX
3283
                            default:
3284
                                goto illegal_insn;
3285
                            }
3286
#else
3287
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3288
                            gen_helper_wrpsr(cpu_dst);
3289
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3290
                            dc->cc_op = CC_OP_FLAGS;
3291
                            save_state(dc, cpu_cond);
3292
                            gen_op_next_insn();
3293
                            tcg_gen_exit_tb(0);
3294
                            dc->is_br = 1;
3295
#endif
3296
                        }
3297
                        break;
3298
                    case 0x32: /* wrwim, V9 wrpr */
3299
                        {
3300
                            if (!supervisor(dc))
3301
                                goto priv_insn;
3302
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3303
#ifdef TARGET_SPARC64
3304
                            switch (rd) {
3305
                            case 0: // tpc
3306
                                {
3307
                                    TCGv_ptr r_tsptr;
3308

    
3309
                                    r_tsptr = tcg_temp_new_ptr();
3310
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3311
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3312
                                                  offsetof(trap_state, tpc));
3313
                                    tcg_temp_free_ptr(r_tsptr);
3314
                                }
3315
                                break;
3316
                            case 1: // tnpc
3317
                                {
3318
                                    TCGv_ptr r_tsptr;
3319

    
3320
                                    r_tsptr = tcg_temp_new_ptr();
3321
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3322
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3323
                                                  offsetof(trap_state, tnpc));
3324
                                    tcg_temp_free_ptr(r_tsptr);
3325
                                }
3326
                                break;
3327
                            case 2: // tstate
3328
                                {
3329
                                    TCGv_ptr r_tsptr;
3330

    
3331
                                    r_tsptr = tcg_temp_new_ptr();
3332
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3333
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3334
                                                  offsetof(trap_state,
3335
                                                           tstate));
3336
                                    tcg_temp_free_ptr(r_tsptr);
3337
                                }
3338
                                break;
3339
                            case 3: // tt
3340
                                {
3341
                                    TCGv_ptr r_tsptr;
3342

    
3343
                                    r_tsptr = tcg_temp_new_ptr();
3344
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3345
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3346
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3347
                                                   offsetof(trap_state, tt));
3348
                                    tcg_temp_free_ptr(r_tsptr);
3349
                                }
3350
                                break;
3351
                            case 4: // tick
3352
                                {
3353
                                    TCGv_ptr r_tickptr;
3354

    
3355
                                    r_tickptr = tcg_temp_new_ptr();
3356
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3357
                                                   offsetof(CPUState, tick));
3358
                                    gen_helper_tick_set_count(r_tickptr,
3359
                                                              cpu_tmp0);
3360
                                    tcg_temp_free_ptr(r_tickptr);
3361
                                }
3362
                                break;
3363
                            case 5: // tba
3364
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3365
                                break;
3366
                            case 6: // pstate
3367
                                save_state(dc, cpu_cond);
3368
                                gen_helper_wrpstate(cpu_tmp0);
3369
                                gen_op_next_insn();
3370
                                tcg_gen_exit_tb(0);
3371
                                dc->is_br = 1;
3372
                                break;
3373
                            case 7: // tl
3374
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3375
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3376
                                               offsetof(CPUSPARCState, tl));
3377
                                break;
3378
                            case 8: // pil
3379
                                gen_helper_wrpil(cpu_tmp0);
3380
                                break;
3381
                            case 9: // cwp
3382
                                gen_helper_wrcwp(cpu_tmp0);
3383
                                break;
3384
                            case 10: // cansave
3385
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3386
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3387
                                               offsetof(CPUSPARCState,
3388
                                                        cansave));
3389
                                break;
3390
                            case 11: // canrestore
3391
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3392
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3393
                                               offsetof(CPUSPARCState,
3394
                                                        canrestore));
3395
                                break;
3396
                            case 12: // cleanwin
3397
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3398
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3399
                                               offsetof(CPUSPARCState,
3400
                                                        cleanwin));
3401
                                break;
3402
                            case 13: // otherwin
3403
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3404
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3405
                                               offsetof(CPUSPARCState,
3406
                                                        otherwin));
3407
                                break;
3408
                            case 14: // wstate
3409
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3410
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3411
                                               offsetof(CPUSPARCState,
3412
                                                        wstate));
3413
                                break;
3414
                            case 16: // UA2005 gl
3415
                                CHECK_IU_FEATURE(dc, GL);
3416
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3417
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3418
                                               offsetof(CPUSPARCState, gl));
3419
                                break;
3420
                            case 26: // UA2005 strand status
3421
                                CHECK_IU_FEATURE(dc, HYPV);
3422
                                if (!hypervisor(dc))
3423
                                    goto priv_insn;
3424
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3425
                                break;
3426
                            default:
3427
                                goto illegal_insn;
3428
                            }
3429
#else
3430
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3431
                            if (dc->def->nwindows != 32)
3432
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3433
                                                (1 << dc->def->nwindows) - 1);
3434
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3435
#endif
3436
                        }
3437
                        break;
3438
                    case 0x33: /* wrtbr, UA2005 wrhpr */
3439
                        {
3440
#ifndef TARGET_SPARC64
3441
                            if (!supervisor(dc))
3442
                                goto priv_insn;
3443
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3444
#else
3445
                            CHECK_IU_FEATURE(dc, HYPV);
3446
                            if (!hypervisor(dc))
3447
                                goto priv_insn;
3448
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3449
                            switch (rd) {
3450
                            case 0: // hpstate
3451
                                // XXX gen_op_wrhpstate();
3452
                                save_state(dc, cpu_cond);
3453
                                gen_op_next_insn();
3454
                                tcg_gen_exit_tb(0);
3455
                                dc->is_br = 1;
3456
                                break;
3457
                            case 1: // htstate
3458
                                // XXX gen_op_wrhtstate();
3459
                                break;
3460
                            case 3: // hintp
3461
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3462
                                break;
3463
                            case 5: // htba
3464
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3465
                                break;
3466
                            case 31: // hstick_cmpr
3467
                                {
3468
                                    TCGv_ptr r_tickptr;
3469

    
3470
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3471
                                    r_tickptr = tcg_temp_new_ptr();
3472
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3473
                                                   offsetof(CPUState, hstick));
3474
                                    gen_helper_tick_set_limit(r_tickptr,
3475
                                                              cpu_hstick_cmpr);
3476
                                    tcg_temp_free_ptr(r_tickptr);
3477
                                }
3478
                                break;
3479
                            case 6: // hver readonly
3480
                            default:
3481
                                goto illegal_insn;
3482
                            }
3483
#endif
3484
                        }
3485
                        break;
3486
#endif
3487
#ifdef TARGET_SPARC64
3488
                    case 0x2c: /* V9 movcc */
3489
                        {
3490
                            int cc = GET_FIELD_SP(insn, 11, 12);
3491
                            int cond = GET_FIELD_SP(insn, 14, 17);
3492
                            TCGv r_cond;
3493
                            int l1;
3494

    
3495
                            r_cond = tcg_temp_new();
3496
                            if (insn & (1 << 18)) {
3497
                                if (cc == 0)
3498
                                    gen_cond(r_cond, 0, cond, dc);
3499
                                else if (cc == 2)
3500
                                    gen_cond(r_cond, 1, cond, dc);
3501
                                else
3502
                                    goto illegal_insn;
3503
                            } else {
3504
                                gen_fcond(r_cond, cc, cond);
3505
                            }
3506

    
3507
                            l1 = gen_new_label();
3508

    
3509
                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3510
                            if (IS_IMM) {       /* immediate */
3511
                                TCGv r_const;
3512

    
3513
                                simm = GET_FIELD_SPs(insn, 0, 10);
3514
                                r_const = tcg_const_tl(simm);
3515
                                gen_movl_TN_reg(rd, r_const);
3516
                                tcg_temp_free(r_const);
3517
                            } else {
3518
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3519
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3520
                                gen_movl_TN_reg(rd, cpu_tmp0);
3521
                            }
3522
                            gen_set_label(l1);
3523
                            tcg_temp_free(r_cond);
3524
                            break;
3525
                        }
3526
                    case 0x2d: /* V9 sdivx */
3527
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3528
                        gen_movl_TN_reg(rd, cpu_dst);
3529
                        break;
3530
                    case 0x2e: /* V9 popc */
3531
                        {
3532
                            cpu_src2 = get_src2(insn, cpu_src2);
3533
                            gen_helper_popc(cpu_dst, cpu_src2);
3534
                            gen_movl_TN_reg(rd, cpu_dst);
3535
                        }
3536
                    case 0x2f: /* V9 movr */
3537
                        {
3538
                            int cond = GET_FIELD_SP(insn, 10, 12);
3539
                            int l1;
3540

    
3541
                            cpu_src1 = get_src1(insn, cpu_src1);
3542

    
3543
                            l1 = gen_new_label();
3544

    
3545
                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3546
                                              cpu_src1, 0, l1);
3547
                            if (IS_IMM) {       /* immediate */
3548
                                TCGv r_const;
3549

    
3550
                                simm = GET_FIELD_SPs(insn, 0, 9);
3551
                                r_const = tcg_const_tl(simm);
3552
                                gen_movl_TN_reg(rd, r_const);
3553
                                tcg_temp_free(r_const);
3554
                            } else {
3555
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3556
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3557
                                gen_movl_TN_reg(rd, cpu_tmp0);
3558
                            }
3559
                            gen_set_label(l1);
3560
                            break;
3561
                        }
3562
#endif
3563
                    default:
3564
                        goto illegal_insn;
3565
                    }
3566
                }
3567
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3568
#ifdef TARGET_SPARC64
3569
                int opf = GET_FIELD_SP(insn, 5, 13);
3570
                rs1 = GET_FIELD(insn, 13, 17);
3571
                rs2 = GET_FIELD(insn, 27, 31);
3572
                if (gen_trap_ifnofpu(dc, cpu_cond))
3573
                    goto jmp_insn;
3574

    
3575
                switch (opf) {
3576
                case 0x000: /* VIS I edge8cc */
3577
                case 0x001: /* VIS II edge8n */
3578
                case 0x002: /* VIS I edge8lcc */
3579
                case 0x003: /* VIS II edge8ln */
3580
                case 0x004: /* VIS I edge16cc */
3581
                case 0x005: /* VIS II edge16n */
3582
                case 0x006: /* VIS I edge16lcc */
3583
                case 0x007: /* VIS II edge16ln */
3584
                case 0x008: /* VIS I edge32cc */
3585
                case 0x009: /* VIS II edge32n */
3586
                case 0x00a: /* VIS I edge32lcc */
3587
                case 0x00b: /* VIS II edge32ln */
3588
                    // XXX
3589
                    goto illegal_insn;
3590
                case 0x010: /* VIS I array8 */
3591
                    CHECK_FPU_FEATURE(dc, VIS1);
3592
                    cpu_src1 = get_src1(insn, cpu_src1);
3593
                    gen_movl_reg_TN(rs2, cpu_src2);
3594
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3595
                    gen_movl_TN_reg(rd, cpu_dst);
3596
                    break;
3597
                case 0x012: /* VIS I array16 */
3598
                    CHECK_FPU_FEATURE(dc, VIS1);
3599
                    cpu_src1 = get_src1(insn, cpu_src1);
3600
                    gen_movl_reg_TN(rs2, cpu_src2);
3601
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3602
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3603
                    gen_movl_TN_reg(rd, cpu_dst);
3604
                    break;
3605
                case 0x014: /* VIS I array32 */
3606
                    CHECK_FPU_FEATURE(dc, VIS1);
3607
                    cpu_src1 = get_src1(insn, cpu_src1);
3608
                    gen_movl_reg_TN(rs2, cpu_src2);
3609
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3610
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3611
                    gen_movl_TN_reg(rd, cpu_dst);
3612
                    break;
3613
                case 0x018: /* VIS I alignaddr */
3614
                    CHECK_FPU_FEATURE(dc, VIS1);
3615
                    cpu_src1 = get_src1(insn, cpu_src1);
3616
                    gen_movl_reg_TN(rs2, cpu_src2);
3617
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3618
                    gen_movl_TN_reg(rd, cpu_dst);
3619
                    break;
3620
                case 0x019: /* VIS II bmask */
3621
                case 0x01a: /* VIS I alignaddrl */
3622
                    // XXX
3623
                    goto illegal_insn;
3624
                case 0x020: /* VIS I fcmple16 */
3625
                    CHECK_FPU_FEATURE(dc, VIS1);
3626
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3627
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3628
                    gen_helper_fcmple16();
3629
                    gen_op_store_DT0_fpr(DFPREG(rd));
3630
                    break;
3631
                case 0x022: /* VIS I fcmpne16 */
3632
                    CHECK_FPU_FEATURE(dc, VIS1);
3633
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3634
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3635
                    gen_helper_fcmpne16();
3636
                    gen_op_store_DT0_fpr(DFPREG(rd));
3637
                    break;
3638
                case 0x024: /* VIS I fcmple32 */
3639
                    CHECK_FPU_FEATURE(dc, VIS1);
3640
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3641
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3642
                    gen_helper_fcmple32();
3643
                    gen_op_store_DT0_fpr(DFPREG(rd));
3644
                    break;
3645
                case 0x026: /* VIS I fcmpne32 */
3646
                    CHECK_FPU_FEATURE(dc, VIS1);
3647
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3648
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3649
                    gen_helper_fcmpne32();
3650
                    gen_op_store_DT0_fpr(DFPREG(rd));
3651
                    break;
3652
                case 0x028: /* VIS I fcmpgt16 */
3653
                    CHECK_FPU_FEATURE(dc, VIS1);
3654
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3655
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3656
                    gen_helper_fcmpgt16();
3657
                    gen_op_store_DT0_fpr(DFPREG(rd));
3658
                    break;
3659
                case 0x02a: /* VIS I fcmpeq16 */
3660
                    CHECK_FPU_FEATURE(dc, VIS1);
3661
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3662
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3663
                    gen_helper_fcmpeq16();
3664
                    gen_op_store_DT0_fpr(DFPREG(rd));
3665
                    break;
3666
                case 0x02c: /* VIS I fcmpgt32 */
3667
                    CHECK_FPU_FEATURE(dc, VIS1);
3668
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3669
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3670
                    gen_helper_fcmpgt32();
3671
                    gen_op_store_DT0_fpr(DFPREG(rd));
3672
                    break;
3673
                case 0x02e: /* VIS I fcmpeq32 */
3674
                    CHECK_FPU_FEATURE(dc, VIS1);
3675
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3676
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3677
                    gen_helper_fcmpeq32();
3678
                    gen_op_store_DT0_fpr(DFPREG(rd));
3679
                    break;
3680
                case 0x031: /* VIS I fmul8x16 */
3681
                    CHECK_FPU_FEATURE(dc, VIS1);
3682
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3683
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3684
                    gen_helper_fmul8x16();
3685
                    gen_op_store_DT0_fpr(DFPREG(rd));
3686
                    break;
3687
                case 0x033: /* VIS I fmul8x16au */
3688
                    CHECK_FPU_FEATURE(dc, VIS1);
3689
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3690
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3691
                    gen_helper_fmul8x16au();
3692
                    gen_op_store_DT0_fpr(DFPREG(rd));
3693
                    break;
3694
                case 0x035: /* VIS I fmul8x16al */
3695
                    CHECK_FPU_FEATURE(dc, VIS1);
3696
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3697
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3698
                    gen_helper_fmul8x16al();
3699
                    gen_op_store_DT0_fpr(DFPREG(rd));
3700
                    break;
3701
                case 0x036: /* VIS I fmul8sux16 */
3702
                    CHECK_FPU_FEATURE(dc, VIS1);
3703
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3704
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3705
                    gen_helper_fmul8sux16();
3706
                    gen_op_store_DT0_fpr(DFPREG(rd));
3707
                    break;
3708
                case 0x037: /* VIS I fmul8ulx16 */
3709
                    CHECK_FPU_FEATURE(dc, VIS1);
3710
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3711
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3712
                    gen_helper_fmul8ulx16();
3713
                    gen_op_store_DT0_fpr(DFPREG(rd));
3714
                    break;
3715
                case 0x038: /* VIS I fmuld8sux16 */
3716
                    CHECK_FPU_FEATURE(dc, VIS1);
3717
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3718
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3719
                    gen_helper_fmuld8sux16();
3720
                    gen_op_store_DT0_fpr(DFPREG(rd));
3721
                    break;
3722
                case 0x039: /* VIS I fmuld8ulx16 */
3723
                    CHECK_FPU_FEATURE(dc, VIS1);
3724
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3725
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3726
                    gen_helper_fmuld8ulx16();
3727
                    gen_op_store_DT0_fpr(DFPREG(rd));
3728
                    break;
3729
                case 0x03a: /* VIS I fpack32 */
3730
                case 0x03b: /* VIS I fpack16 */
3731
                case 0x03d: /* VIS I fpackfix */
3732
                case 0x03e: /* VIS I pdist */
3733
                    // XXX
3734
                    goto illegal_insn;
3735
                case 0x048: /* VIS I faligndata */
3736
                    CHECK_FPU_FEATURE(dc, VIS1);
3737
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3738
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3739
                    gen_helper_faligndata();
3740
                    gen_op_store_DT0_fpr(DFPREG(rd));
3741
                    break;
3742
                case 0x04b: /* VIS I fpmerge */
3743
                    CHECK_FPU_FEATURE(dc, VIS1);
3744
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3745
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3746
                    gen_helper_fpmerge();
3747
                    gen_op_store_DT0_fpr(DFPREG(rd));
3748
                    break;
3749
                case 0x04c: /* VIS II bshuffle */
3750
                    // XXX
3751
                    goto illegal_insn;
3752
                case 0x04d: /* VIS I fexpand */
3753
                    CHECK_FPU_FEATURE(dc, VIS1);
3754
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3755
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3756
                    gen_helper_fexpand();
3757
                    gen_op_store_DT0_fpr(DFPREG(rd));
3758
                    break;
3759
                case 0x050: /* VIS I fpadd16 */
3760
                    CHECK_FPU_FEATURE(dc, VIS1);
3761
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3762
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3763
                    gen_helper_fpadd16();
3764
                    gen_op_store_DT0_fpr(DFPREG(rd));
3765
                    break;
3766
                case 0x051: /* VIS I fpadd16s */
3767
                    CHECK_FPU_FEATURE(dc, VIS1);
3768
                    gen_helper_fpadd16s(cpu_fpr[rd],
3769
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3770
                    break;
3771
                case 0x052: /* VIS I fpadd32 */
3772
                    CHECK_FPU_FEATURE(dc, VIS1);
3773
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3774
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3775
                    gen_helper_fpadd32();
3776
                    gen_op_store_DT0_fpr(DFPREG(rd));
3777
                    break;
3778
                case 0x053: /* VIS I fpadd32s */
3779
                    CHECK_FPU_FEATURE(dc, VIS1);
3780
                    gen_helper_fpadd32s(cpu_fpr[rd],
3781
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3782
                    break;
3783
                case 0x054: /* VIS I fpsub16 */
3784
                    CHECK_FPU_FEATURE(dc, VIS1);
3785
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3786
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3787
                    gen_helper_fpsub16();
3788
                    gen_op_store_DT0_fpr(DFPREG(rd));
3789
                    break;
3790
                case 0x055: /* VIS I fpsub16s */
3791
                    CHECK_FPU_FEATURE(dc, VIS1);
3792
                    gen_helper_fpsub16s(cpu_fpr[rd],
3793
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3794
                    break;
3795
                case 0x056: /* VIS I fpsub32 */
3796
                    CHECK_FPU_FEATURE(dc, VIS1);
3797
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3798
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3799
                    gen_helper_fpsub32();
3800
                    gen_op_store_DT0_fpr(DFPREG(rd));
3801
                    break;
3802
                case 0x057: /* VIS I fpsub32s */
3803
                    CHECK_FPU_FEATURE(dc, VIS1);
3804
                    gen_helper_fpsub32s(cpu_fpr[rd],
3805
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3806
                    break;
3807
                case 0x060: /* VIS I fzero */
3808
                    CHECK_FPU_FEATURE(dc, VIS1);
3809
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3810
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3811
                    break;
3812
                case 0x061: /* VIS I fzeros */
3813
                    CHECK_FPU_FEATURE(dc, VIS1);
3814
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
3815
                    break;
3816
                case 0x062: /* VIS I fnor */
3817
                    CHECK_FPU_FEATURE(dc, VIS1);
3818
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3819
                                    cpu_fpr[DFPREG(rs2)]);
3820
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3821
                                    cpu_fpr[DFPREG(rs2) + 1]);
3822
                    break;
3823
                case 0x063: /* VIS I fnors */
3824
                    CHECK_FPU_FEATURE(dc, VIS1);
3825
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3826
                    break;
3827
                case 0x064: /* VIS I fandnot2 */
3828
                    CHECK_FPU_FEATURE(dc, VIS1);
3829
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3830
                                     cpu_fpr[DFPREG(rs2)]);
3831
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3832
                                     cpu_fpr[DFPREG(rs1) + 1],
3833
                                     cpu_fpr[DFPREG(rs2) + 1]);
3834
                    break;
3835
                case 0x065: /* VIS I fandnot2s */
3836
                    CHECK_FPU_FEATURE(dc, VIS1);
3837
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3838
                    break;
3839
                case 0x066: /* VIS I fnot2 */
3840
                    CHECK_FPU_FEATURE(dc, VIS1);
3841
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3842
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3843
                                    cpu_fpr[DFPREG(rs2) + 1]);
3844
                    break;
3845
                case 0x067: /* VIS I fnot2s */
3846
                    CHECK_FPU_FEATURE(dc, VIS1);
3847
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3848
                    break;
3849
                case 0x068: /* VIS I fandnot1 */
3850
                    CHECK_FPU_FEATURE(dc, VIS1);
3851
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3852
                                     cpu_fpr[DFPREG(rs1)]);
3853
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3854
                                     cpu_fpr[DFPREG(rs2) + 1],
3855
                                     cpu_fpr[DFPREG(rs1) + 1]);
3856
                    break;
3857
                case 0x069: /* VIS I fandnot1s */
3858
                    CHECK_FPU_FEATURE(dc, VIS1);
3859
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3860
                    break;
3861
                case 0x06a: /* VIS I fnot1 */
3862
                    CHECK_FPU_FEATURE(dc, VIS1);
3863
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3864
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3865
                                    cpu_fpr[DFPREG(rs1) + 1]);
3866
                    break;
3867
                case 0x06b: /* VIS I fnot1s */
3868
                    CHECK_FPU_FEATURE(dc, VIS1);
3869
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3870
                    break;
3871
                case 0x06c: /* VIS I fxor */
3872
                    CHECK_FPU_FEATURE(dc, VIS1);
3873
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3874
                                    cpu_fpr[DFPREG(rs2)]);
3875
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3876
                                    cpu_fpr[DFPREG(rs1) + 1],
3877
                                    cpu_fpr[DFPREG(rs2) + 1]);
3878
                    break;
3879
                case 0x06d: /* VIS I fxors */
3880
                    CHECK_FPU_FEATURE(dc, VIS1);
3881
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3882
                    break;
3883
                case 0x06e: /* VIS I fnand */
3884
                    CHECK_FPU_FEATURE(dc, VIS1);
3885
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3886
                                     cpu_fpr[DFPREG(rs2)]);
3887
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3888
                                     cpu_fpr[DFPREG(rs2) + 1]);
3889
                    break;
3890
                case 0x06f: /* VIS I fnands */
3891
                    CHECK_FPU_FEATURE(dc, VIS1);
3892
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3893
                    break;
3894
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
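    /* Format 3 memory instructions: the effective address is rs1 plus
       either a signed 13-bit immediate or rs2.  The xop field then selects
       integer loads, FP loads, integer stores, FP stores, or the
       alternate-space (ASI) and atomic variants handled below. */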
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
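                /* ldd writes a 64-bit value to an even/odd register pair,
                   so rd must be even; an odd rd is an illegal instruction. */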
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
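    /* SPARC keeps both pc and npc so that delayed branches work: a
       non-branching instruction simply advances pc to npc and npc by 4.
       JUMP_PC means npc depends on a condition evaluated at run time, so
       both possible targets are emitted via gen_branch2(). */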
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
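    /* Exception exits: each label below saves the translator state, raises
       the corresponding trap and marks the TB as finished. */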
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}

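/* Translate one basic block: decode instructions starting at tb->pc until a
   branch is emitted, a page boundary is crossed, the op buffer fills up, or
   single-stepping is requested.  When 'spc' is set, the loop also records
   pc/npc for every instruction so a searched PC can be restored later. */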
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

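/* One-time registration of the fixed TCG globals used above: each maps onto
   a CPUState field (condition codes, pc/npc, the global registers g1-g7 and
   the FP register file). */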
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

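/* Restore pc/npc after searching a TB for a host PC: gen_opc_npc holds
   1 (DYNAMIC_PC) when npc was already stored and 2 (JUMP_PC) when it must
   be picked from the recorded jump targets using the saved condition. */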
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}